<?php
namespace YukisCoffee\CoffeeRequest\Exception;
/**
* Thrown by a Singleton helper class when violating standard
* access permissions.
*
* @author Taniko Yamamoto <[email protected]>
*/
class MethodPrivacyException extends BaseException {}
<?php
namespace YukisCoffee\CoffeeRequest\Exception;
use Exception;
/**
* Base exception class for all CoffeeRequest exceptions.
*
* @author Taniko Yamamoto <[email protected]>
*/
abstract class BaseException extends Exception {}
<?php
namespace YukisCoffee\CoffeeRequest\Debugging;
// An exception is used to get the top-level stacktrace.
use Exception;
/**
* Implements a simplified stack trace for Promises.
*
* This makes user debugging easier, as it prioritises the path that the
* Promise takes through user-implemented code while hiding the abstractions
* made within this library.
*
* As such, the stack trace in debug outputs can be made less misleading. The
* programmer will not see anything about Loop.php in a Promise exception, and
* will instead first see the last known file with the Promise, which is
* considerably more useful.
*
* The original (full) stack trace is preserved for advanced readings.
*
* @author Taniko Yamamoto <[email protected]>
*/
class PromiseStackTrace
{
/**
* Stores the original stack trace at the time of construction.
*
* The simplified stack trace is only formed when called upon, so only this
* is stored.
*/
private array $originalTrace;
private static array $skippedFiles = [];
/**
* Registers a filename (class) to be skipped in reading the stack trace of
* the Promise.
*
* Since knowing the origin of the error is more important than knowing the
* underlying behaviour in most cases, the primary stack trace shown is
* simplified in order to prioritise the desired information.
*
* By default, these are all classes that are involved in abstractions with
* the Promise system.
*/
public static function registerSkippedFile(string $filename): void
{
self::$skippedFiles[] = $filename;
}
/**
* Unregisters a filename (class) to be skipped in reading the stack trace.
*
* If the file is not present in the list, this will be skipped silently.
*/
public static function unregisterSkippedFile(string $filename): void
{
$pos = array_search($filename, self::$skippedFiles);
if ($pos !== false)
{
array_splice(self::$skippedFiles, $pos, 1);
}
}
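// Illustrative usage sketch (not part of this class; the path below is
// hypothetical): a wrapper library can hide its own event-loop plumbing
// from user-facing traces like so.
//
//     PromiseStackTrace::registerSkippedFile("/path/to/CoffeeRequest/Loop.php");
//     $trace = new PromiseStackTrace();
//     echo $trace; // frames whose file matches Loop.php are omitted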
public function __construct()
{
$this->originalTrace = (new Exception)->getTrace();
}
public function __toString()
{
return $this->getTraceAsString();
}
/**
* Gets the simplified trace as an array.
*/
public function getTraceAsArray(): array
{
$result = [];
foreach ($this->originalTrace as $item)
{
if (!in_array($item["file"], self::$skippedFiles))
{
$result[] = $item;
}
}
return $result;
}
/**
* Gets the simplified trace as a string.
*/
public function getTraceAsString(): string
{
return self::stringifyTrace($this->getTraceAsArray());
}
/**
* Gets the original (advanced) trace as an array.
*/
public function getOriginalTraceAsArray(): array
{
return $this->originalTrace;
}
/**
* Gets the original (advanced) trace as a string.
*/
public function getOriginalTraceAsString(): string
{
return self::stringifyTrace($this->getOriginalTraceAsArray());
}
/**
* Creates a stringified stack trace (similar to PHP's Exceptions).
*/
private static function stringifyTrace(array $trace): string
{
$result = "";
$i = 0;
foreach ($trace as $item)
{
$file = $item["file"];
$line = $item["line"];
$function = $item["function"];
if (!is_string($file))
{
$file = "[unknown file]";
}
if (isset($line) && is_int($line))
{
$file .= "($line)";
}
if (!is_string($function))
{
$result .= "#$i [internal function]";
continue;
}
else
{
$result .= "#$i $file: $function(";
}
if (!empty($item["args"]))
{
$args = $item["args"];
$alreadyHasArgument = false;
foreach ($args as $argument)
{
// If there's already an argument, add ", " for formatting.
if ($alreadyHasArgument)
{
$result .= ", ";
}
switch (gettype($argument))
{
case "string":
$result .= '"';
$formattedString = str_replace('"', "\\\"", $argument);
if (strlen($formattedString) > 10)
{
$formattedString = substr($formattedString, 0, 10) . "...";
}
$result .= $formattedString;
$result .= '"';
break;
case "integer":
$result .= $argument;
break;
case "double":
$result .= sprintf("%lf", $argument);
break;
case "boolean":
$result .= $argument ? "true" : "false";
break;
case "object":
$result .= "Object(" . get_class($argument) . ")";
break;
case "array":
$result .= "Array";
break;
case "NULL":
$result .= "null";
break;
case "resource":
case "resource (closed)":
$result .= "Resource id #" . get_resource_id($argument);
break;
case "unknown type":
$result .= "[unknown type]";
break;
}
$alreadyHasArgument = true;
}
}
$result .= ")\n";
$i++;
}
$result .= "#$i {main}\n";
return $result;
}
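// For reference, a sketch of the kind of output the formatting above
// produces (file, line, and function names are made up):
//
//     #0 /var/www/rehike/index.php(42): handleRequest("FEwhat_to_...")
//     #1 /var/www/rehike/router.php(10): run(Object(Request))
//     #2 {main}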
}
<?php
namespace Rehike\Controller;
use Com\Youtube\Innertube\Helpers\VideosContinuationWrapper;
use Rehike\Network;
use Rehike\Util\WebV2Shelves;
use Rehike\Util\RichShelfUtils;
use Rehike\Model\Feed\MFeedAppbarNav;
use Rehike\Signin\API as SignIn;
use \Com\Youtube\Innertube\Request\BrowseRequestParams;
use \Rehike\Util\Base64Url;
use \Rehike\Model\History\HistoryModel;
use \Rehike\Model\Browse\InnertubeBrowseConverter;
use \Rehike\Util\ParsingUtils;
use function Rehike\Async\async;
/**
* Common controller for all feed pages.
*
* This includes the homepage, Trending page, Subscriptions page, and many
* other ones.
*
* Feeds are one of the most complicated and varying parts of InnerTube and
* YouTube's internal structure, but also common enough that it's only
* reasonable to share code for them.
*
* That said, it's very difficult to make this work just right. So be warned,
* this may be the buggiest part of Rehike.
*
* @author Aubrey Pankow <[email protected]>
* @author Taniko Yamamoto <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends \Rehike\Controller\core\NirvanaController {
public $template = "feed";
/**
* IDs of feeds to add the "common feed appbar" on.
*
* Since 2015, YouTube has used this to create horizontal "tabs" between
* the homepage, trending page, and subscriptions page.
*
* @see MFeedAppbarNav
*/
const FEED_APPBAR_SUPPORTED_IDS = [
"FEwhat_to_watch",
"FEtrending",
"FEsubscriptions"
];
/**
* IDs of feeds that require the user to be signed in to access.
*
* If the user is signed out, they will be redirected to the homepage. This
* is to maintain compatibility with the standard YouTube server.
*/
const SIGNIN_REQUIRED_IDS = [
"FEsubscriptions"
];
public function onGet(&$yt, $request) {
$feedId = $request->path[1] ?? "what_to_watch";
$feedId = "FE" . $feedId;
$this->setEndpoint("browse", $feedId);
if (in_array($feedId, self::FEED_APPBAR_SUPPORTED_IDS)) {
$yt->appbar->nav = new MFeedAppbarNav($feedId);
}
if (!SignIn::isSignedIn() && in_array($feedId, self::SIGNIN_REQUIRED_IDS)) {
header("Location: /");
return;
}
switch ($feedId) {
case "FEwhat_to_watch":
self::whatToWatch($yt);
break;
case "FEsubscriptions":
self::subscriptions($yt, $request);
break;
default:
self::miscFeeds($yt, $request, $feedId);
break;
}
}
/**
* Home page.
*
* Internally, the homepage is known as FEwhat_to_watch, which corresponds
* with its older name "What to Watch".
*/
private static function whatToWatch(&$yt) {
return async(function() use ($yt)
{
// The copyright text in the description only appeared if the
// user originated from the homepage.
$yt->footer->enableCopyright = true;
// The homepage also had the searchbox in the masthead autofocus.
$yt->masthead->searchbox->autofocus = true;
// Initial Android request to get continuation
$response = yield Network::innertubeRequest(
action: "browse",
body: [
"browseId" => "FEwhat_to_watch"
],
clientName: "ANDROID",
clientVersion: "17.14.33"
);
$ytdata = $response->getJson();
// Why we need to write better InnerTube parsing tools:
foreach ($ytdata->contents->singleColumnBrowseResultsRenderer->tabs as $tab)
if (isset($tab->tabRenderer->content->sectionListRenderer))
foreach($tab->tabRenderer->content->sectionListRenderer->continuations as $cont)
if (isset($cont->reloadContinuationData))
$continuation = $cont->reloadContinuationData->continuation;
$newContinuation = WebV2Shelves::continuationToWeb($continuation);
// Thrown to next then
$response = yield Network::innertubeRequest(
action: "browse",
body: [
"continuation" => $newContinuation
]
);
$data = $response->getJson();
$yt->page->content = (object) [
"sectionListRenderer" => InnertubeBrowseConverter::sectionListRenderer(RichShelfUtils::reformatResponse($data)->sectionListRenderer)
];
});
}
/**
* History feed.
*/
private static function history(&$yt, $request) {
$params = new BrowseRequestParams();
if (isset($request->params->bp))
$params->mergeFromString(Base64Url::decode($request->params->bp));
if (isset($request->path[2]))
$params->setTab($request->path[2]);
Network::innertubeRequest(
action: "browse",
body: [
"browseId" => "FEhistory",
"params" => Base64Url::encode($params->serializeToString())
]
)->then(function ($response) use ($yt) {
$yt->page = HistoryModel::bake($response->getJson());
});
}
/**
* Other feeds.
*
* Don't even try to make sense of this.
*/
private static function miscFeeds(&$yt, $request, $feedId) {
$params = new BrowseRequestParams();
if (isset($request->params->bp))
$params->mergeFromString(Base64Url::decode($request->params->bp));
if (isset($request->params->flow))
$params->setFlow((int) $request->params->flow);
if (isset($request->path[2]))
$params->setTab($request->path[2]);
Network::innertubeRequest(
action: "browse",
body: [
"browseId" => $feedId,
"params" => Base64Url::encode($params->serializeToString())
]
)->then(function ($response) use ($yt) {
$ytdata = $response->getJson();
if (isset($ytdata->contents->twoColumnBrowseResultsRenderer))
foreach ($ytdata->contents->twoColumnBrowseResultsRenderer->tabs as $tab)
if (isset($tab->tabRenderer->content))
$content = $tab->tabRenderer->content;
if (isset($content->sectionListRenderer)) {
$content->sectionListRenderer = InnertubeBrowseConverter::sectionListRenderer($content->sectionListRenderer, [
"channelRendererUnbrandedSubscribeButton" => true
]);
}
$yt->page->content = $content;
if (isset($ytdata->header))
foreach ($ytdata->header as $header)
if (isset($header->title))
if (isset($header->title->runs)
|| isset($header->title->simpleText))
$yt->page->title = ParsingUtils::getText($header->title);
else
$yt->page->title = $header->title;
});
}
/**
* Subscriptions feed.
*
* Now a separate function due to the rich grid update.
*
* For anyone who is about to read or edit this function, I am sincerely
* sorry, and I wish you the best of luck. You're going to need it.
*/
private static function subscriptions(&$yt, $request)
{
$list = ((int)@$request->params->flow == 2);
Network::innertubeRequest(
action: "browse",
body: [
"browseId" => "FEsubscriptions"
]
)->then(function($response) use (&$yt, $list) {
$ytdata = $response->getJson();
$rcontents = $ytdata->contents->twoColumnBrowseResultsRenderer->tabs[0]->tabRenderer->content->richGridRenderer->contents;
$contents = [
(object) [
"shelfRenderer" => $rcontents[0]->richSectionRenderer->content->shelfRenderer
]
];
$menu = &$contents[0]->shelfRenderer->menu->menuRenderer->topLevelButtons;
// Fix the state of the shelf menu accordingly
if ($list)
{
foreach ($menu as $button)
{
$button->buttonRenderer->isSelected = !$button->buttonRenderer->isSelected;
}
}
// Snip the shelf off the array so we can work on the videos themselves
array_shift($rcontents);
foreach ($rcontents as $i => $content)
if (isset($content->richItemRenderer))
{
if ($list)
{
if ($i == 0)
{
$contents[0]->shelfRenderer->content = (object) [
"expandedShelfContentsRenderer" => (object) [
"items" => [
InnertubeBrowseConverter::richItemRenderer($content->richItemRenderer, [
"listView" => $list
])
]
]
];
}
else
{
$contents[] = InnertubeBrowseConverter::richItemRenderer($content->richItemRenderer, [
"listView" => $list
]);
}
}
}
$contents = InnertubeBrowseConverter::generalLockupConverter($contents);
$yt->page->content = (object) [
"sectionListRenderer" => (object) [
"contents" => [
(object) [
"itemSectionRenderer" => (object) [
"contents" => $contents
]
]
]
]
];
if ($cont = @$rcontents[count($rcontents) - 1]->continuationItemRenderer)
{
$ctoken = &$cont->continuationEndpoint->continuationCommand->token;
$contw = new VideosContinuationWrapper();
$contw->setContinuation($ctoken);
$contw->setList($list);
$contw->setWrapInGrid(!$list);
$ctoken = Base64Url::encode($contw->serializeToString());
$yt->page->content->sectionListRenderer->contents[] = (object) [
"continuationItemRenderer" => $cont
];
}
$yt->test = $rcontents;
if (isset($ytdata->header))
foreach ($ytdata->header as $header)
if (isset($header->title))
if (isset($header->title->runs)
|| isset($header->title->simpleText))
$yt->page->title = ParsingUtils::getText($header->title);
else
$yt->page->title = $header->title;
});
}
};
<?php
use Rehike\Controller\core\NirvanaController;
use Rehike\Model\AllComments\AllCommentsModel;
use Rehike\Network;
use function Rehike\Async\async;
return new class extends NirvanaController
{
public $template = "all_comments";
public function onGet(&$yt, $request)
{
return async(function() use (&$yt, $request) {
$this->useJsModule("www/watch");
if (!isset($request->params->v))
{
header("Location: /oops");
return;
}
$yt->videoId = $request->params->v;
$response = yield Network::innertubeRequest("next", [
"videoId" => $request->params->v
]);
$wdata = $response->getJson();
$results = $wdata->contents->twoColumnWatchNextResults->results->results->contents;
// Invalid video ID
if (isset($results[0]->itemSectionRenderer->contents[0]->backgroundPromoRenderer))
{
header("Location: /oops");
return;
}
// To get the videoRenderer of the video
$sresponse = yield Network::innertubeRequest("search", [
"query" => $request->params->v,
"params" => "QgIIAQ%253D%253D" // Ensure YouTube doesn't autocorrect the query
]);
$sdata = $sresponse->getJson();
$cdata = null;
foreach ($results as $result)
{
if (@$result->itemSectionRenderer->targetId == "comments-section")
{
$ctoken = $result->itemSectionRenderer->contents[0]->continuationItemRenderer->continuationEndpoint->continuationCommand->token;
$cresponse = yield Network::innertubeRequest("next", [
"continuation" => $ctoken
]);
$yt->commentsToken = $ctoken;
$cdata = $cresponse->getJson();
}
}
if ($cdata != null)
{
$yt->page = AllCommentsModel::bake($sdata, $cdata, $request->params->v);
}
else
{
header("Location: /oops");
}
});
}
};
<?php
use \Com\Youtube\Innertube\Request\BrowseRequestParams;
use \Rehike\Controller\core\NirvanaController;
use \Rehike\Model\Playlist\PlaylistModel;
use \Rehike\Model\Channels\Channels4Model;
use \Rehike\Model\Channels\Channels4\MHeader;
use \Rehike\Model\Channels\Channels4\MCarouselHeader;
use \Rehike\Model\Channels\Channels4\MSecondaryHeader;
use \Rehike\Util\Base64Url;
use \Rehike\Network;
use \Rehike\i18n;
use Rehike\Util\ChannelUtils;
use Rehike\Signin\API as SignIn;
use function Rehike\Async\async;
return new class extends NirvanaController
{
public $template = "playlist";
public function onGet(&$yt, $request)
{
return async(function() use (&$yt, $request) {
if (!isset($request->params->list))
{
header("Location: /oops");
return;
}
// The playlist ID is stored in the URL parameter ?list=...
$yt->playlistId = $request->params->list;
// Internally, all playlist IDs are prefixed with VL, followed by
// their canonical prefix (PL, RD, LL, UU, etc.).
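// A minimal illustration of that mapping, using a made-up ID:
// ?list=PLabc123 is browsed below as "VLPLabc123".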
$this->setEndpoint("browse", "VL" . $yt->playlistId);
$response = yield Network::innertubeRequest(
action: "browse",
body: [
"browseId" => "VL" . $yt->playlistId
]
);
$ytdata = $response->getJson();
$yt->page = PlaylistModel::bake($ytdata);
// Hitchhiker also showed the channel's header, so this also
// requests the channel page in order to get its owner's header.
$yt->ucid = $ytdata->header->playlistHeaderRenderer
->ownerEndpoint->browseEndpoint->browseId ?? null;
if (isset($yt->ucid))
{
// Init i18n for channel model
i18n::newNamespace("channels")->registerFromFolder("i18n/channels");
$params = new BrowseRequestParams();
$params->setTab("playlists");
$yt->partiallySelectTabs = true;
$channelResponse = yield Network::innertubeRequest(
action: "browse",
body: [
"browseId" => $yt->ucid,
"params" => Base64Url::encode($params
->serializeToString()
)
]
);
// If the channel response carries a header, use it to build the
// channel header model; otherwise no channel header is rendered.
$channelData = $channelResponse->getJson();
if ($header = @$channelData->header->c4TabbedHeaderRenderer)
{
$yt->page->channelHeader = new MHeader($header, "/channel/$yt->ucid");
}
elseif ($header = @$channelData->header->carouselHeaderRenderer)
{
$yt->page->channelHeader = new MCarouselHeader($header, "/channel/$yt->ucid");
}
// If user is signed in and channel owner, get data for the
// secondary channel header.
$ownerData = null;
if ($yt->ucid == @SignIn::getInfo()["ucid"])
{
$ownerData = yield ChannelUtils::getOwnerData($yt->ucid);
}
if (!is_null($ownerData))
{
$yt->page->secondaryHeader = new MSecondaryHeader($ownerData);
}
if (isset($yt->page->channelHeader))
{
$header = &$yt->page->channelHeader;
$yt->appbar->addNav();
$yt->appbar->nav->addOwner(
$header->getTitle(),
"/channel/$yt->ucid",
$header->thumbnail ?? "",
);
}
if ($tabs = @$channelData->contents->twoColumnBrowseResultsRenderer->tabs)
{
Channels4Model::processAndAddTabs(
$yt,
$tabs,
$yt->page->channelHeader
);
}
}
});
}
};
<?php
namespace Rehike\Controller;
use Rehike\Controller\core\HitchhikerController;
use Rehike\Network;
use Rehike\Model\Attribution\AttributionModel;
/**
* Controller for the video attribution information page.
*
* Technically, this page doesn't exist anymore. Rehike includes it for two
* reasons:
* 1. As a homage to it being the last true Hitchhiker page online.
* 2. For compatibility with Shorts attributions, a new feature that does
* exist.
*
* @author Aubrey Pankow <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends HitchhikerController {
public $template = "attribution";
public function onGet(&$yt, $request) {
if (!isset($request->params->v)) {
$this->template = "oops";
return;
}
$videoId = $request->params->v;
Network::innertubeRequest(
action: "navigation/resolve_url",
body: [
"url" => "https://www.youtube.com/source/" . $videoId . "/shorts"
]
)->then(function ($resolve) {
$resolveData = $resolve->getJson();
if (!isset($resolveData->endpoint->browseEndpoint->params)) {
$this->template = "oops";
return;
}
return Network::innertubeRequest(
action: "browse",
body: [
"browseId" => "FEsfv_audio_pivot",
"params" => $resolveData->endpoint->browseEndpoint->params
]
);
})->then(function ($response) use ($yt, $videoId) {
$ytdata = $response->getJson();
$yt->page = AttributionModel::bake($ytdata, $videoId);
});
}
};
<?php
namespace Rehike\Controller;
use Rehike\Network;
use Rehike\Signin\API as SignIn;
/**
* Controller for the /profile endpoint.
*
* This endpoint simply redirects to the user's channel page if they're logged
* in. Otherwise it redirects to the homepage.
*
* In other words, this only exists for compatibility with the standard YT
* server.
*
* @author Aubrey Pankow <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends \Rehike\Controller\core\HitchhikerController {
// Doesn't have a corresponding page as this redirects the user.
public $useTemplate = false;
public function onGet(&$yt, $request) {
if (!SignIn::isSignedIn()) {
header("Location: /");
exit();
}
Network::innertubeRequest(
action: "navigation/resolve_url",
body: [
"url" => "https://www.youtube.com/profile"
]
)->then(function ($response) {
$ytdata = $response->getJson();
if ($a = @$ytdata->endpoint->urlEndpoint->url) {
header("Location: " . str_replace("https://www.youtube.com", "", $a));
}
});
}
};
<?php
use \Rehike\Controller\core\HitchhikerController;
/**
* Controller for the oops (error) page.
*
* Very simple one, I know. All it's needed for is making a bridge between
* CV2 and the static error page.
*
* @author The Rehike Maintainers
*/
return new class extends HitchhikerController {
public $template = "oops";
};
<?php
namespace Rehike\Controller;
use Rehike\Controller\core\NirvanaController;
use Rehike\Model\Results\ResultsModel;
use \Com\Youtube\Innertube\Request\SearchRequestParams;
use Rehike\Network;
use Rehike\i18n;
use Rehike\Util\Base64Url;
/**
* Controller for the results (search) page.
*
* This handles the base logic for directing to the search page, including
* pagination, which doesn't exist in any other client but is still supported
* by the InnerTube API.
*
* @author Aubrey Pankow <[email protected]>
* @author Daylin Cooper <[email protected]>
* @author Taniko Yamamoto <[email protected]>
* @author The Rehike Maintainers
*/
class ResultsController extends NirvanaController {
public $template = "results";
// No clue why these are static.
public static $query;
public static $param;
public function onGet(&$yt, $request) {
// invalid request redirect
if (!isset($_GET["search_query"])) {
header("Location: /");
die();
}
// Seemingly unused on the client-side (?), but this should still be
// declared regardless.
$this->useJsModule("www/results");
$i18n = &i18n::newNamespace("results");
$i18n->registerFromFolder("i18n/results");
// Setup search query internally
$query = $_GET["search_query"] ?? null;
self::$query = $query;
// Display query in the searchbox.
$yt->masthead->searchbox->query = $query;
// used for filters
$yt->params = $_GET["sp"] ?? null;
self::$param = &$yt->params;
// Calculates the offset to give the InnerTube server.
$resultsIndex = self::getPaginatorIndex($yt->params);
Network::innertubeRequest(
action: "search",
body: [
"query" => self::$query,
"params" => $yt->params
]
)->then(function ($response) use ($yt, $resultsIndex) {
$ytdata = $response->getJson();
$resultsCount = ResultsModel::getResultsCount($ytdata);
$paginatorInfo = self::getPaginatorInfo(
$resultsCount, $resultsIndex
);
$yt->page = ResultsModel::bake(
data: $ytdata,
paginatorInfo: $paginatorInfo,
query: self::$query
);
});
}
/**
* Get the index at which the page starts.
*
* This is *not* the page number. This is the index by which to shift the
* given results from the start, i.e. an index of 20 would start 20 results
* after the first result.
*
* @param $sp Base64-encoded search parameter provided by the YT server.
* @return int
*/
public static function getPaginatorIndex($sp) {
if ($sp == null) {
return 0;
} else {
try {
$parsed = new SearchRequestParams();
$parsed->mergeFromString(
Base64Url::decode($sp)
);
if ($parsed->hasIndex()) {
$index = $parsed->getIndex();
} else {
$index = 0;
}
return $index;
} catch (\Throwable $e) {
return 0;
}
}
}
/**
* Get information for the paginator at the bottom of the search page.
*
* @param int $resultsCount The number of results for the query.
* @param int $index Index at which to start the first result.
* @return object
*/
public static function getPaginatorInfo($resultsCount, $index) {
// youtube is 20 results/page
$resultsPerPage = 20;
$pageNo = ceil($index / $resultsPerPage) + 1;
$pagesCount = ceil($resultsCount / $resultsPerPage);
return (object) [
"resultsPerPage" => $resultsPerPage,
"pageNumber" => $pageNo,
"pagesCount" => $pagesCount
];
}
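/*
 * Worked example of the arithmetic above (illustrative values): with 87
 * results and an index of 40, $resultsPerPage = 20 gives
 * $pageNo = ceil(40 / 20) + 1 = 3 and $pagesCount = ceil(87 / 20) = 5,
 * i.e. results 41-60 render as page 3 of 5.
 */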
/**
* Get the URL parameter that indicates the search page to the server.
*
* @param string $sp Standard base64-encoded parameter to be modified.
* @param int $page The page number to encode.
*
* @return string A modified search parameter that uses the page.
*/
public static function getPageParam($sp = null, $page = 1) {
$parsed = new SearchRequestParams();
if ($sp == null) {
$parsed->setIndex(($page - 1) * 20);
$parsed->setSomething("");
} else {
try {
$parsed->mergeFromString(Base64Url::decode($sp));
} catch (\Throwable $e) {} // consume any exception
$parsed->setIndex(($page - 1) * 20);
}
return Base64Url::encode($parsed->serializeToString());
}
/**
* Returns the URL for a page's index.
*
* @param string $sp Standard base64 encoded parameter to be modified.
* @param int $page The page number to encode.
*
* @return string URL for that page.
*/
public static function getPageParamUrl($sp = null, $page = 1) {
$query = urlencode(self::$query);
$param = self::getPageParam($sp, $page);
return "/results?search_query=$query&sp=$param";
}
};
return new ResultsController;
<?php
namespace Rehike\Controller;
use Rehike\Controller\core\NirvanaController;
use \Com\Youtube\Innertube\Request\BrowseRequestParams;
use Rehike\Network;
use Rehike\Async\Promise;
use YukisCoffee\CoffeeRequest\Network\Response;
use Rehike\Util\Base64Url;
use Rehike\i18n;
use Rehike\Util\ExtractUtils;
use Rehike\Util\ChannelUtils;
use Rehike\Signin\API as SignIn;
use \Rehike\Model\Channels\Channels4Model as Channels4;
use function Rehike\Async\async;
class channel extends NirvanaController {
public $template = "channel";
public static $requestedTab = "";
// Tabs where the "Featured channels" sidebar should show on
public const SECONDARY_RESULTS_ENABLED_TAB_IDS = [
"featured",
"discussion",
"community",
"about"
];
// Indices of which cloud chip corresponds to each sort option
public const VIDEO_TAB_SORT_INDICES = [
"dd",
"p",
"da"
];
// Sort map for regular tabs that still use the old sorting backend
public const SORT_MAP = [
null,
"p",
"da",
"dd",
"lad"
];
public const VIDEO_TABS = [
"videos",
"streams"
];
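// Example of how these constants are used further down in onGet() (the
// chip order is assumed to match InnerTube's default): a request such as
// /channel/UC.../videos?sort=p resolves to index 1 of
// VIDEO_TAB_SORT_INDICES, i.e. the second filter chip of the rich grid,
// whose continuation token is then requested to re-sort the tab.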
public function onPost(&$yt, $request) {
http_response_code(404);
$this->template = "error/404";
}
public function onGet(&$yt, $request)
{
async(function() use (&$yt, $request) {
$this->useJsModule("www/channels");
// Init i18n
i18n::newNamespace("channels")->registerFromFolder("i18n/channels");
// BUG (kirasicecreamm): ChannelUtils::getUcid is hardcoded
// to look at the path property of the input object.
// This is bad design.
if ($request->path[0] != "channel")
{
$ucid = yield ChannelUtils::getUcid($request);
}
else
{
$ucid = $request->path[1];
}
$yt->ucid = $ucid;
if ($ucid == "" || is_null($ucid))
{
http_response_code(404);
$this->spfIdListeners = [];
$this->template = "error/404";
return;
}
// If user is signed in and channel owner, get data for the
// secondary channel header.
$ownerData = null;
if ($ucid == @SignIn::getInfo()["ucid"])
{
$ownerData = yield ChannelUtils::getOwnerData($ucid);
}
// Register the endpoint in the request
$this->setEndpoint("browse", $ucid);
// Get the requested tab
$tab = "featured";
if (!in_array($request->path[0], ["channel", "user", "c"])) {
if (isset($request->path[1]) && "" != @$request->path[1]) {
$tab = strtolower($request->path[1]);
}
} elseif (isset($request->path[2]) && "" != @$request->path[2]) {
$tab = strtolower($request->path[2]);
}
self::$requestedTab = $tab;
// Handle live tab redirect (if the channel is livestreaming)
if ("live" == $tab)
{
$this->handleLiveTabRedirect($request->rawPath);
}
// Expose tab to configure frontend JS
$yt->tab = $tab;
// Configure request params
if ("featured" != $tab ||
isset($request->params->shelf_id) ||
isset($request->params->view) ||
(isset($request->params->sort) && !in_array($tab, ["videos", "streams", "shorts"])))
{
$params = new BrowseRequestParams();
$params->setTab($tab);
}
if (isset($request->params->shelf_id)) {
$params->setShelfId((int) $request->params->shelf_id);
}
if (isset($request->params->view)) {
$params->setView((int) $request->params->view);
}
if (isset($request->params->sort) && !in_array($tab, ["videos", "streams", "shorts"]))
{
$id = array_search($request->params->sort, self::SORT_MAP);
if (is_int($id))
{
$params->setSort($id);
}
}
// Compose InnerTube requests for later.
$channelRequest = Network::innertubeRequest(
action: "browse",
body: [
"browseId" => $ucid,
"params" => isset($params)
? Base64Url::encode($params->serializeToString())
: null,
"query" => $request->params->query ?? null
]
);
if (
in_array($tab, self::SECONDARY_RESULTS_ENABLED_TAB_IDS) &&
"featured" != $tab
)
{
$sidebarRequest = Network::innertubeRequest(
action: "browse",
body: [
"browseId" => $ucid
]
);
}
else
{
$sidebarRequest = new Promise(fn($r) => $r());
}
// Run the channel and sidebar requests at the same time and store them in different
// variables.
[$channelResponse, $sidebarResponse] = yield Promise::all($channelRequest, $sidebarRequest);
$page = $channelResponse->getJson();
$yt->response = $page;
// Get content for current sort if it
// is not recently uploaded (default)
$yt->videosSort = 0;
if (in_array($tab, self::VIDEO_TABS) && isset($request->params->sort))
{
// Get index of sort name
$sort = array_search($request->params->sort, self::VIDEO_TAB_SORT_INDICES);
$yt->videosSort = $sort;
if ($sort > 0)
{
$tabs = &$page->contents->twoColumnBrowseResultsRenderer->tabs;
// Do NOT call this $tab. It will override the previous $tab
// and cause an object to be registered as the current tab.
foreach ($tabs as &$tabR)
{
if (@$tabR->tabRenderer->selected)
{
$grid = &$tabR->tabRenderer->content->richGridRenderer ?? null;
break;
}
}
if (isset($grid))
{
$ctoken = $grid->header->feedFilterChipBarRenderer->contents[$sort]
->chipCloudChipRenderer->navigationEndpoint->continuationCommand
->token ?? null;
if (isset($ctoken))
{
$sort = yield Network::innertubeRequest(
action: "browse",
body: [
"continuation" => $ctoken
]
);
$newContents = $sort->getJson();
$newContents = $newContents
->onResponseReceivedActions[1]
->reloadContinuationItemsCommand
->continuationItems ?? null;
if (isset($newContents) && is_array($newContents))
{
$grid->contents = $newContents;
}
}
}
}
}
$yt->subConfirmation = false;
if (isset($request->params->sub_confirmation))
{
if ($request->params->sub_confirmation == "1")
{
$yt->subConfirmation = true;
}
}
switch ($request->path[0]) {
case "c":
case "user":
case "channel":
$baseUrl = "/" . $request->path[0] . "/" . $request->path[1];
break;
default:
$baseUrl = "/" . $request->path[0];
break;
}
Channels4::registerBaseUrl($baseUrl);
Channels4::registerCurrentTab($tab);
// Handle the sidebar
$sidebar = null;
if (isset($sidebarResponse))
{
$sidebar = $sidebarResponse->getJson();
}
else if ("featured" == $tab)
{
$sidebar = $page;
}
$yt->page = Channels4::bake(
yt: $yt,
data: $page,
sidebarData: $sidebar,
ownerData: $ownerData
);
});
}
/**
* Redirect to a channel's livestream by visiting their live URL.
*
* This only works if said channel is in the process of livestreaming,
* otherwise this will have no effect and will simply take you to the
* featured tab of the channel.
*/
public function handleLiveTabRedirect($path)
{
Network::innertubeRequest(
action: "navigation/resolve_url",
body: [
"url" => "https://www.youtube.com" . $path
]
)->then(function ($response) {
$ytdata = $response->getJson();
if (isset($ytdata->endpoint->watchEndpoint))
{
$url = "/watch?v=" . $ytdata->endpoint->watchEndpoint->videoId;
(require "includes/spf_redirect_handler.php")($url);
}
});
}
}
// Export
return new channel();
<?php
namespace Rehike\Controller;
use Rehike\Controller\core\HitchhikerController;
use Rehike\Network;
use Rehike\Signin\API as SignIn;
use \Rehike\Model\ChannelSwitcher\ChannelSwitcherModel;
use function Rehike\Async\async;
// TODO: send "X-Goog-AuthUser" header in innertube request
return new class extends HitchhikerController
{
public $template = "channel_switcher";
public function onGet(&$yt, $request)
{
async(function() use (&$yt, &$request) {
if (!SignIn::isSignedIn())
{
header("Location: https://accounts.google.com/v3/signin/identifier?dsh=S369128673%3A1675950960460363&continue=https%3A%2F%2Fwww.youtube.com%2Fsignin%3Faction_handle_signin%3Dtrue%26app%3Ddesktop%26hl%3Den%26next%3Dhttps%253A%252F%252Fwww.youtube.com%252Fchannel_switcher%26feature%3Dredirect_login&hl=en&passive=true&service=youtube&uilel=3&flowName=GlifWebSignIn&flowEntry=ServiceLogin&ifkv=AWnogHdTaLSFWkbPzHGsk61TYFu3C76VEZLMz1uTSkocGsIfWWBDd8s0xL3geNfwrIMQ3RiPfuGgGg");
}
$ytdata = (yield Network::innertubeRequest(
action: "account/accounts_list",
body: [
"requestType" => "ACCOUNTS_LIST_REQUEST_TYPE_CHANNEL_SWITCHER",
"callCircumstance" => "SWITCHING_USERS_FULL"
]
))->getJson();
$channels = $ytdata->actions[0]->updateChannelSwitcherPageAction->page->channelSwitcherPageRenderer->contents ?? null;
// TODO: Get from cache
$switcherOriginal = (yield Network::urlRequestFirstParty(
"https://www.youtube.com/getAccountSwitcherEndpoint",
))->getText();
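// The first four characters are presumably YouTube's ")]}'" anti-JSON-
// hijacking prefix on this endpoint; strip it before decoding.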
$switcher = json_decode(substr($switcherOriginal, 4));
$yt->channels = $channels;
$next = null;
if (isset($request->params->next))
{
$next = $request->params->next;
}
$yt->page = ChannelSwitcherModel::bake($channels, $switcher, $next);
});
}
};
<?php
use Rehike\Controller\core\NirvanaController;
use Com\Youtube\Innertube\Request\NextRequestParams;
use Com\Youtube\Innertube\Request\NextRequestParams\UnknownThing;
use Rehike\Network;
use Rehike\Async\Promise;
use Rehike\Util\Base64Url;
use Rehike\ConfigManager\ConfigManager;
use Rehike\Util\WatchUtils;
use Rehike\Util\ExtractUtils;
use Rehike\i18n;
use Rehike\Model\Watch\WatchModel;
use YukisCoffee\CoffeeRequest\Exception\GeneralException;
/**
* Controller for the watch page.
*
* @author Aubrey Pankow <[email protected]>
* @author Daylin Cooper <[email protected]>
* @author Taniko Yamamoto <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends NirvanaController {
public $template = 'watch';
// Watch should only load the guide after everything else is done.
protected $delayLoadGuide = true;
public function onGet(&$yt, $request)
{
$this->useJsModule("www/watch");
i18n::newNamespace("watch")->registerFromFolder("i18n/watch");
// invalid request redirect
if (!isset($_GET['v'])) {
header('Location: /');
die();
}
/*
* Set theater mode state.
*/
if ("1" == @$_COOKIE['wide'])
{
$yt->theaterMode = $_COOKIE['wide'];
}
else
{
$yt->theaterMode = "0";
$_COOKIE['wide'] = "0";
}
// begin request
$yt->videoId = $request->params->v;
$yt->playlistId = $request->params->list ?? null;
// Coerce the ?index= parameter into a numeric string, defaulting to "1".
$yt->playlistIndex = (string) ((int) ($request->params->index ?? '1'));
// An index of 0 (or a non-numeric value) falls back to the first item.
if (0 == $yt->playlistIndex) $yt->playlistIndex = 1;
// Used by InnerTube in some cases for player-specific parameters.
$yt->playerParams = $request->params->pp ?? null;
// Common parameters to be used for both the next API and player API.
$sharedRequestParams = [
'videoId' => $yt->videoId
];
// Defines parameters to be sent only to the next (watch data) API.
// Required for LC link implementation.
$nextOnlyParams = [];
$lc = $request->params->lc ?? $request->params->google_comment_id ?? null;
/*
* Generate LC (linked comment) param.
*
* This is handled by InnerTube as a next parameter, which is base64-
* encoded as with similar params. As such, it needs to be encoded like
* any other protobuf/base64 parameter (ugly).
*
* LC itself simply modifies the comment continuation that's provided
* to link to a specific comment.
*/
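// A hedged example of the kind of URL that carries this parameter (the IDs
// below are made up): /watch?v=abcdefghijk&lc=UgxEXAMPLECOMMENTID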
if (isset($lc))
{
$param = new NextRequestParams();
// I don't know if this is needed, but I want to include it
// anyways.
$param->setUnknownThing(new UnknownThing(["a" => 0]));
$param->setLinkedCommentId($lc);
$nextOnlyParams += [
"params" => Base64Url::encode($param->serializeToString())
];
}
if (!is_null($yt->playlistId)) {
$sharedRequestParams['playlistId'] = $yt->playlistId;
$sharedRequestParams['playlistIndex'] = $yt->playlistIndex;
}
// TODO (kirasicecreamm): Clean up this algo, make better
if (isset($request->params->t)) {
preg_match_all("/\d{1,6}/", $request->params->t, $times);
$times = $times[0];
if (count($times) == 1) { // Just a number of seconds. (A cleaner construct reportedly broke this for no clear reason; improvements welcome.)
$startTime = (int) $times[0];
} else if (count($times) == 2) { // Minutes and seconds.
$startTime = ((int) $times[0] * 60) + (int) $times[1];
} else if (count($times) == 3) {
$startTime = ((int) $times[0] * 3600) + ((int) $times[1] * 60) + (int) $times[2];
} else {
$startTime = 0;
}
}
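// Worked example of the parsing above: "t=1h02m30s" yields
// $times = ["1", "02", "30"], so $startTime = (1 * 3600) + (2 * 60) + 30
// = 3690 seconds. A bare "t=90" yields a single match and is treated as
// 90 seconds.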
// Makes the main watch request.
$nextRequest = Network::innertubeRequest(
"next",
$sharedRequestParams + $nextOnlyParams
);
// Unlike Polymer, Hitchhiker had all of the player data already
// available in the initial response. So an additional player request
// is used.
$playerRequest = Network::innertubeRequest(
"player",
[
"playbackContext" => [
'contentPlaybackContext' => (object) [
'autoCaptionsDefaultOn' => false,
'autonavState' => 'STATE_OFF',
'html5Preference' => 'HTML5_PREF_WANTS',
'lactMilliseconds' => '13407',
'mdxContext' => (object) [],
'playerHeightPixels' => 1080,
'playerWidthPixels' => 1920,
'signatureTimestamp' => $yt->playerConfig->signatureTimestamp
]
],
"startTimeSecs" => $startTime ?? 0,
"params" => $yt->playerParams
] + $sharedRequestParams
);
/**
* Determine whether or not to use the Return YouTube Dislike
* API to return dislikes. Retrieved from application config.
*/
if (true === ConfigManager::getConfigProp("appearance.useRyd"))
{
$rydUrl = "https://returnyoutubedislikeapi.com/votes?videoId=" . $yt->videoId;
$rydRequest = Network::urlRequest($rydUrl);
}
else
{
// If RYD is disabled, then send a void Promise that instantly
// resolves itself.
$rydRequest = new Promise(fn($r) => $r());
}
Promise::all([
"next" => $nextRequest,
"player" => $playerRequest,
"ryd" => $rydRequest
])->then(function ($responses) use ($yt) {
$nextResponse = $responses["next"]->getJson();
$playerResponse = $responses["player"]->getJson();
try
{
$rydResponse = $responses["ryd"]->getJson();
}
catch (GeneralException $e)
{
$rydResponse = (object) [];
}
// This may not be needed any longer, but manually removing ads
// has been historically required as adblockers no longer have
// the Hitchhiker-era rules.
$this->removeAds($playerResponse);
// Push these over to the global object.
$yt->playerResponse = $playerResponse;
$yt->watchNextResponse = $nextResponse;
$yt->page = WatchModel::bake(
yt: $yt,
data: $nextResponse,
videoId: $yt->videoId,
rydData: $rydResponse
);
});
}
/**
* Handles SPF requests.
*
* Specifically, this binds the player data to the SPF data in order to
* refresh the player on the client-side.
*
* @param $data SPF data.
* @return void (Modifies $data.)
*/
public function handleSpfData(&$data)
{
$yt = &$this->yt;
if (isset($yt->playerResponse)) {
$data->data = (object) [
'swfcfg' => (object) [
'args' => (object) [
'raw_player_response' => null,
'raw_watch_next_response' => null
]
]
];
$data->data->swfcfg->args->raw_player_response = $yt->playerResponse;
$data->data->swfcfg->args->raw_watch_next_response = $yt->watchNextResponse;
if (isset($yt->page->playlist)) {
$data->data->swfcfg->args->is_listed = '1';
$data->data->swfcfg->args->list = $yt->playlistId;
$data->data->swfcfg->args->videoId = $yt->videoId;
}
}
}
/**
* Remove ads from a player response if they exist.
*/
protected function removeAds(object $playerResponse): void
{
if (isset($playerResponse->playerAds))
unset($playerResponse->playerAds);
if (isset($playerResponse->adPlacements))
unset($playerResponse->adPlacements);
}
};
<?php
namespace Rehike\Controller\core;
use SpfPhp\SpfPhp;
use Rehike\Model\Appbar\MAppbar as Appbar;
use Rehike\Model\Footer\MFooter as Footer;
use Rehike\Model\Masthead\MMasthead as Masthead;
/**
* Defines a general YouTube Nirvana controller.
*
* This implements the base API and data used to render a Nirvana (Appbar)
* page.
*
* @author Taniko Yamamoto <[email protected]>
* @author Aubrey Pankow <[email protected]>
* @author Daylin Cooper <[email protected]>
*/
abstract class NirvanaController extends HitchhikerController
{
/**
* Don't request the guide on initial visit.
*
* This should be true on pages like watch, where the guide
* isn't open by default.
*
* @var bool
*/
protected $delayLoadGuide = false;
/** @inheritdoc */
protected $spfIdListeners = [
'@body<class>',
'player-unavailable<class>',
'debug',
'early-body',
'appbar-content<class>',
'alerts',
'content',
'@page<class>',
'header',
'ticker-content',
'player-playlist<class>',
'@player<class>'
];
/** @inheritdoc */
protected function init(&$yt, &$template)
{
$yt->spfEnabled = true;
$yt->useModularCore = true;
$yt->modularCoreModules = [];
$yt->appbar = new Appbar();
$yt->page = (object)[];
if ($this->useTemplate) {
$yt->masthead = new Masthead(true);
$yt->footer = new Footer();
}
$yt->footer = new Footer();
// Request appbar guide fragments if the page has the
// guide enabled, the request is not SPF, and the guide
// is open by default.
if (!$this->delayLoadGuide && !SpfPhp::isSpfRequested())
{
$this->getPageGuide()->then(function ($guide) use ($yt) {
$yt->appbar->addGuide($guide);
});
}
}
/**
* Define the page to use a JS page module.
*
* @param string $module Name of the module (not URL)
*
* @return void
*/
protected function useJsModule($module)
{
$this->yt->modularCoreModules[] = $module;
}
}
<?php
namespace Rehike\Controller\core;
use Rehike\TemplateManager;
use Rehike\Network;
use Rehike\Async\Promise;
use Rehike\SecurityChecker;
use Rehike\Player\PlayerCore;
use SpfPhp\SpfPhp;
use Rehike\ControllerV2\RequestMetadata;
use Rehike\Debugger\Debugger;
use Rehike\Model\Guide\MGuide as Guide;
use Rehike\Model\Footer\MFooter as Footer;
use Rehike\Model\Masthead\MMasthead as Masthead;
use Rehike\Model\Rehike\Security\SecurityLightbox;
/**
* Defines a general YouTube Hitchhiker controller.
*
* This implements the base API and data used to render a Hitchhiker
* page.
*
* @author Taniko Yamamoto <[email protected]>
* @author Aubrey Pankow <[email protected]>
* @author Daylin Cooper <[email protected]>
*/
abstract class HitchhikerController
{
/**
* Stores information about the current page endpoint.
*
* @var object
*/
protected static $currentEndpoint;
/**
* Stores all information that is sent to Twig for rendering the page.
*
* @var object $yt
* + useModularCore (bool, required) - Toggles base.js/core.js use by Hitchhiker.
* + modularCoreModules (string[]) - Defines base.js page modules.
* + spfEnabled (bool, required) - Enables YouTube SPF (soft loading).
* + spf (bool, required) - True if the page is navigated to via SPF.
* + title (string) - Page title name
* + appbar (object) - Available in NirvanaController; defines YouTube Appbar.
* + page (object) - Page metadata
*/
protected $yt;
/**
* Defines the default page template.
*
* This may be overridden for certain contexts in an onGet()
* callback.
*
* @var string
*/
public $template = "";
/**
* Whether or not we should use a Twig template to render.
*
* Some AJAX responses are so simple, that using a template
* makes no sense.
*
* @var boolean
*/
public $useTemplate = true;
/**
* Defines the default element IDs that are listened to by
* YouTube's SPF library.
*
* This defines what elements get changed with every soft navigation.
*
* @var string[]
*/
protected $spfIdListeners = [
'player-unavailable<class>',
'alerts',
'content',
'@page<class>',
'player-playlist<class>',
'@player<class>'
];
/**
* What the Content-Type header should be in the response
*
* @var string
*/
public $contentType = "text/html";
/**
* Implements the base functionality that is ran on every GET request.
*
* This function should not be overridden for page-specific
* functionality. Use the controller's API (onGet()) for that.
*
* @param object $yt Template data.
*
* @param string $template Passes a template in and out of the function.
* For API usage, you can safely ignore this. It only
* matters on the technical end.
*
* @param RequestMetadata $request Reports request metadata.
*
* @return void
*/
public function get(&$yt, &$template, $request)
{
header("Content-Type: " . $this->contentType);
$this->yt = &$yt;
$this->init($yt, $template);
$this->initPlayer($yt);
$this->onGet($yt, $request);
Network::run();
$this->postInit($yt, $template);
if ($this->useTemplate) $this->doGeneralRender();
}
/**
* Implements the base functionality that is ran on every POST request.
*
* This function should not be overridden for page-specific
* functionality. Use the controller's API (onPost()) for that.
*
* @param object $yt Template data.
*
* @param string $template Passes a template in and out of the function.
* For API usage, you can safely ignore this. It only
* matters on the technical end.
*
* @param RequestMetadata $request Reports request metadata.
*
* @return void
*/
public function post(&$yt, &$template, $request)
{
header("Content-Type: " . $this->contentType);
$this->yt = &$yt;
$this->init($yt, $template);
$this->onPost($yt, $request);
Network::run();
$this->postInit($yt, $template);
if ($this->useTemplate) $this->doGeneralRender();
}
/**
* Initialise the player.
*
* @param object $yt Template data.
* @return void
*/
public function initPlayer(&$yt)
{
$playerConfig = PlayerCore::getInfo();
$yt->playerConfig = $playerConfig;
}
/**
* Request the guide and return the processed result.
*
* As Rehike implements a Nirvana frontend primarily, this behaviour
* is unused by the base Hitchhiker controller. This function
* is used by NirvanaController.
*
* @return object
*/
public function getPageGuide(): Promise
{
return new Promise(function ($resolve) {
Network::innertubeRequest("guide")->then(function ($response)
use ($resolve)
{
$data = $response->getJson();
$guide = Guide::fromData($data);
$resolve($guide);
});
});
}
/**
* Set the current page endpoint.
*
* This is only used internally for coordinating the pages. More
* specifically, it is used by the guide service to know which item
* to select.
*
* @param string $type Type of the endpoint ("browse" or "url").
* @param string $a Endpoint payload (browse ID or URL, respectively).
*/
public function setEndpoint($type, $a)
{
$type = strtolower($type);
// Will be casted to an object
$data = [];
switch ($type)
{
case "browse":
$data["browseEndpoint"] = (object)[
"browseId" => $a
];
break;
case "url":
$data["urlEndpoint"] = (object)[
"url" => $a
];
break;
}
$data = (object)$data;
self::$currentEndpoint = $data;
}
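/*
 * Usage sketch (mirrors how the feed and channel controllers call this):
 *
 *     $this->setEndpoint("browse", "FEwhat_to_watch");
 *     // self::$currentEndpoint becomes:
 *     // (object) ["browseEndpoint" => (object) ["browseId" => "FEwhat_to_watch"]]
 */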
/**
* Defines the API for handling GET requests. Pages should always use this;
* only subcontrollers may override onGet() directly.
*
* @param object $yt Template data.
* @param RequestMetadata $request Reports request metadata.
*
* @return void
*/
public function onGet(&$yt, $request) {}
/**
* Defines the API for handling POST requests. Pages should always use this;
* only subcontrollers may override onPost() directly.
*
* @param object $yt Template data.
* @param RequestMetadata $request Reports request metadata.
*
* @return void
*/
public function onPost(&$yt, $request) {}
/**
* Set initial variables for this controller type.
*
* @param $yt Template data.
* @param $template Backend template data.
*
* @return void
*/
protected function init(&$yt, &$template)
{
$yt->spfEnabled = false;
$yt->useModularCore = false;
$yt->page = (object)[];
if ($this->useTemplate) {
$yt->masthead = new Masthead(false);
$yt->footer = new Footer();
}
}
/**
* Defines the tasks performed after the page is done being built.
*
* Mainly, this prepares data internally to prepare sending to Twig.
*
* @param $yt Template data.
* @param $template Backend template data.
*
* @return void
*/
public function postInit(&$yt, &$template)
{
$template = $this->template;
$yt->currentEndpoint = self::$currentEndpoint;
if (!SecurityChecker::isSecure() && !SpfPhp::isSpfRequested())
{
$yt->rehikeSecurityNotice = new SecurityLightbox();
}
}
/**
* Perform a Twig render, accounting for SPF status if it is enabled, and
* reporting the debugger if it is enabled.
*
* @return void
*/
public function doGeneralRender()
{
if (SpfPhp::isSpfRequested() && $this->yt->spfEnabled)
{
// Report SPF status to the templater
$this->yt->spf = true;
// Capture the render so that we may send it through SpfPhp.
$capturedRender = TemplateManager::render();
// Skip serialisation so that the output may be modified. (Warnings are
// suppressed here because the library itself can emit spurious ones.)
$spf = @SpfPhp::parse($capturedRender, $this->spfIdListeners, [
"skipSerialization" => true
]);
// Post-data generation callback for custom handling
$this->handleSpfData($spf);
if (is_object($spf))
$spf->rebug_data = Debugger::exposeSpf();
header("Content-Type: application/json");
echo json_encode($spf);
}
else
{
/*
* Expose the debugger if it is enabled. All necessary checks are performed
* within this function, so all that needs to be done here is calling it.
*/
Debugger::expose();
$capturedRender = TemplateManager::render();
// In the case this is not an SPF request, we don't have to do anything.
echo $capturedRender;
}
}
/**
* Modify generated SPF data before it's sent to the client.
*
* For example, adding custom metadata to the response.
*
* @param object $data reference
* @return void
*/
public function handleSpfData(&$data) {}
}
<?php
namespace Rehike\Controller\core;
use \Rehike\Controller\core\NirvanaController;
/**
* Defines a general AJAX endpoint controller.
*
* @author Aubrey Pankow <[email protected]>
* @author The Rehike Maintainers
*/
abstract class AjaxController extends NirvanaController {
public $contentType = "application/json";
// Find action
// Not used for watch_fragments or watch_fragments2 (electric boogaloo)
protected function findAction() {
foreach ($_GET as $key => $value) {
if (strpos($key, "action_") > -1) {
return str_replace("action_", "", $key);
}
}
return null;
}
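// Example (matching how the feed AJAX controller consumes the result): a
// query string such as ?action_get_unseen_notification_count=1 makes this
// return "get_unseen_notification_count".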
protected static function error() {
http_response_code(400);
die('{"errors":[]}');
}
}
<?php
use Rehike\Controller\core\AjaxController;
use \Rehike\Network;
use SpfPhp\SpfPhp;
/**
* Controller for AJAX feeds.
*
* This is only used for the notifications page.
*
* @author Aubrey Pankow <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends AjaxController {
public $template = "ajax/feed/get_notifications";
public function onGet(&$yt, $request) {
if (!@$yt->signin["isSignedIn"]) self::error();
$action = self::findAction();
if (@$action == "get_unseen_notification_count") {
$this->useTemplate = false;
Network::innertubeRequest(
action: "notification/get_unseen_count"
)->then(function ($response) {
$ytdata = $response->getJson();
$updateAction = $ytdata->actions[0]->updateNotificationsUnseenCountAction;
echo json_encode((object) [
"unseen_notification_count" => $updateAction->unseenCount ?? $ytdata->unseenCount ?? null,
"timestamp_lower_bound" => 0,
"high_priority_notification_timeout_ms" => 3000,
"polling_timeout" => $updateAction->timeoutMs ?? 1800000
]);
});
} else if (@$action == "continuation") {
$this->template = "ajax/feed/continuation";
if (!@$request->params->continuation) {
echo json_encode((object) [
"errors" => [
"Specify a continuation"
]
]);
die();
}
Network::innertubeRequest(
action: "notification/get_notification_menu",
body: [
"ctoken" => $request->params->continuation ?? null
]
)->then(function ($response) use ($yt) {
$ytdata = $response->getJson();
$yt->notifList = $ytdata->actions[0] ->appendContinuationItemsAction->continuationItems ?? null;
$yt->nextContinuation = (end($yt->notifList)
->continuationItemRenderer
->continuationEndpoint
->getNotificationMenuEndpoint
->ctoken) ?? null;
});
} else {
$this->getNotifications($yt, $request);
}
}
public function onPost(&$yt, $request) {
if (!@$yt->signin["isSignedIn"]) self::error();
$this->getNotifications($yt, $request);
}
private function getNotifications(&$yt, $request): void
{
$this->spfIdListeners = [
"yt-masthead-notifications-content"
];
Network::innertubeRequest(
action: "notification/get_notification_menu",
body: [
"notificationsMenuRequestType" => "NOTIFICATIONS_MENU_REQUEST_TYPE_INBOX"
]
)->then(function ($response) use ($yt) {
$ytdata = $response->getJson();
$yt->notifSections = $ytdata->actions[0]
->openPopupAction->popup->multiPageMenuRenderer->sections;
});
}
};
<?php
use Rehike\Controller\core\AjaxController;
use Rehike\Network;
use function Rehike\Async\async;
/**
* Controller for the other playlist AJAX endpoints.
* ...yeah, playlists on Hitchhiker are kinda odd to
* work with.
*
* @author Aubrey Pankow <[email protected]>
* @author Taniko Yamamoto <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends AjaxController
{
public $useTemplate = false;
public function onPost(&$yt, $request)
{
$action = self::findAction();
switch ($action)
{
// This is the feature that saves the playlist to your library.
// It used to be called liking playlists, and for that reason,
// it retains that name internally.
case "playlist_vote":
$action = null;
if ($_POST["vote"] == "like")
{
$action = "like";
}
else if ($_POST["vote"] == "remove_like")
{
$action = "removelike";
}
if (!is_null($action))
{
Network::innertubeRequest($action, [
"target" => (object) [
"playlistId" => $_POST["list"]
]
], ignoreErrors: true)->then(function($response) {
var_dump($response);
// $ytdata = $response->getJson();
// if (isset($ytdata->error))
// {
// echo (object) [
// "code" => $ytdata->error->code
// ];
// }
// else
// {
// echo (object) [
// "code" => "SUCCESS"
// ];
// }
});
}
break;
}
}
};
<?php
namespace Rehike\Controller\ajax;
use Rehike\Network;
use Rehike\Util\RichShelfUtils;
use Rehike\Util\Base64Url;
use Rehike\Model\Browse\InnertubeBrowseConverter;
use Com\Youtube\Innertube\Helpers\VideosContinuationWrapper;
use function Rehike\Async\async;
/**
* Controller for browse AJAX requests.
*
* @author Aubrey Pankow <[email protected]>
* @author Taniko Yamamoto <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends \Rehike\Controller\core\AjaxController
{
public $template = "ajax/browse";
public function onGet(&$yt, $request)
{
return $this->onPost($yt, $request);
}
public function onPost(&$yt, $request)
{
return async(function() use (&$yt, $request)
{
if (!isset($request->params->continuation)) self::error();
$continuation = $request->params->continuation;
$contWrapper = new VideosContinuationWrapper();
$contWrapper->mergeFromString(Base64Url::decode($continuation));
$list = false;
$wrap = false;
if ($contWrapper->getContinuation() != "")
{
$continuation = $contWrapper->getContinuation();
$list = $contWrapper->getList();
$wrap = $contWrapper->getWrapInGrid();
}
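// Sketch of what the wrapper above carries (see the subscriptions feed
// controller, which bakes these tokens; the token value is illustrative):
//
//     $w = new VideosContinuationWrapper();
//     $w->setContinuation("EXAMPLETOKEN"); // stands in for a real InnerTube continuation
//     $w->setList(true);                   // render as a list rather than a grid
//     $w->setWrapInGrid(false);
//     $param = Base64Url::encode($w->serializeToString());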
$response = yield Network::innertubeRequest(
action: "browse",
body: [
"continuation" => $continuation
]
);
$ytdata = $response->getJson();
if (isset($ytdata->onResponseReceivedActions))
{
foreach ($ytdata->onResponseReceivedActions as $action)
{
if (isset($action->appendContinuationItemsAction))
{
foreach ($action->appendContinuationItemsAction->continuationItems as &$item)
{
switch (true)
{
case isset($item->continuationItemRenderer):
if (!$list && !$wrap)
{
$yt->page->continuation = $item->continuationItemRenderer->continuationEndpoint->continuationCommand->token;
}
else
{
$nContWrapper = new VideosContinuationWrapper();
$nContWrapper->setContinuation($yt->page->continuation = $item->continuationItemRenderer->continuationEndpoint->continuationCommand->token);
$nContWrapper->setList($list);
$nContWrapper->setWrapInGrid($wrap);
$yt->page->continuation = Base64Url::encode($nContWrapper->serializeToString());
}
break;
case isset($item->richItemRenderer):
$item = RichShelfUtils::reformatShelfItem($item, $list);
break;
case isset($item->richSectionRenderer->content->richShelfRenderer):
$item = RichShelfUtils::reformatShelf($item, $list);
break;
}
}
$yt->page->items = $action->appendContinuationItemsAction->continuationItems;
}
}
}
else
{
self::error();
}
$yt->page->items =
InnertubeBrowseConverter::generalLockupConverter(
$yt->page->items,
[
"listView" => $list,
"channelRendererUnbrandedSubscribeButton" => true
]
);
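            // If grid wrapping was requested by the continuation wrapper, nest the
            // items in a shelfRenderer/gridRenderer structure (presumably so that
            // they render the same way as the initial grid on the page).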
if ($wrap)
{
$yt->page->items = [
(object) [
"shelfRenderer" => (object) [
"content" => (object) [
"gridRenderer" => (object) [
"contents" => $yt->page->items
]
]
]
]
];
}
$yt->page->target = $request->params->target_id;
$yt->page->response = $ytdata;
});
}
}; | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
namespace Rehike\Controller\ajax;
use Rehike\Controller\core\AjaxController;
use Rehike\Network;
/**
* Related (watch) ajax controller
*
* @author Aubrey Pankow <[email protected]>
* @author Daylin Cooper <[email protected]>
* @author Taniko Yamamoto <[email protected]>
* @author The Rehike Maintainers
*
* @version 1.0.20220805
*/
class AjaxRelatedController extends AjaxController {
public $useTemplate = true;
public $template = "ajax/related";
public function onGet(&$yt, $request) {
return $this->onPost($yt, $request);
}
public function onPost(&$yt, $request) {
$this->spfIdListeners = [
'@masthead_search<data-is-crosswalk>',
'watch-more-related'
];
if (!isset($_GET["continuation"])) {
die('{"name":"other"}');
}
Network::innertubeRequest(
action: "next",
body: [
"continuation" => $_GET["continuation"]
]
)->then(function ($response) use ($yt) {
$ytdata = $response->getJson();
$yt->page->items = $ytdata
->onResponseReceivedEndpoints[0]
->appendContinuationItemsAction
->continuationItems
;
});
}
}
return new AjaxRelatedController(); | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
namespace Rehike\Controller\ajax;
use Rehike\Controller\core\AjaxController;
use Rehike\Model\Comments\CommentThread;
use Rehike\Model\Comments\CommentsHeader;
use Rehike\Model\Appbar\MAppbar as Appbar;
use Rehike\Network;
use Rehike\ConfigManager\ConfigManager;
/**
* Watch fragments ajax controller
*
* @author Aubrey Pankow <[email protected]>
* @author Daylin Cooper <[email protected]>
* @author Taniko Yamamoto <[email protected]>
* @author The Rehike Maintainers
*
* @version 1.0.20220805
*/
class AjaxWatchFragments2Controller extends AjaxController {
public $useTemplate = true;
// 404 by default.
// The real template will be set by subcontroller functions.
public $template = '404';
public function onPost(&$yt, $request) {
$fragsId = $_GET['frags'] ?? '';
switch ($fragsId) {
case 'comments':
self::getComments($yt);
break;
case 'guide':
self::getGuide($yt);
break;
case '':
default:
break;
}
}
private function getGuide(&$yt) {
$this->template = "common/appbar/appbar_guide";
$this->spfIdListeners = [
'@masthead_search<data-is-crosswalk>',
'guide'
];
$yt->appbar = new Appbar();
$this->getPageGuide()->then(function ($guide) use ($yt) {
$yt->appbar->addGuide($guide);
});
}
private function getComments(&$yt) {
$this->template = 'common/watch/watch_fragments2/comments';
$yt->page = (object) [];
$yt->commentsRenderer = (object) [
"headerRenderer" => (object)[],
"comments" => (object)[]
];
$this->spfIdListeners = [
'@masthead_search<data-is-crosswalk>',
'watch-discussion'
];
Network::innertubeRequest(
action: "next",
body: [ "continuation" => $_GET['ctoken'] ]
)->then(function($response) use (&$yt) {
$ytdata = $response->getJson();
$yt->commentsRenderer->headerRenderer = CommentsHeader::fromData(
data: $ytdata->onResponseReceivedEndpoints[0]->reloadContinuationItemsCommand->continuationItems[0]->commentsHeaderRenderer,
id: ConfigManager::getConfigProp("appearance.allCommentsLink") ? $_GET["v"] : null
);
/**
* Comments Threads Rewrite
* TODO: further rewrite may be necessary
*/
$_oct = $ytdata->onResponseReceivedEndpoints[1]->reloadContinuationItemsCommand; // original comment threads
CommentThread::bakeComments($_oct)->then(function ($value) use ($yt, $_oct) {
$yt->commentsRenderer->comments = $value;
});
});
}
}
return new AjaxWatchFragments2Controller(); | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
namespace Rehike\Controller\ajax;
use \Rehike\Signin\API as SignIn;
return new class extends \Rehike\Controller\core\AjaxController
{
public $template = "ajax/delegate_account";
protected $spfIdListeners = [
"yt-delegate-accounts"
];
public function onGet(&$yt, $request)
{
return $this->onPost($yt, $request);
}
public function onPost(&$yt, $request)
{
if (!SignIn::isSignedIn())
{
self::error();
}
$info = SignIn::getInfo();
$channelList = [];
foreach ($info["channelPicker"] as $channel)
{
$channelList[] = (object) $channel;
}
for ($i = 0; $i < count($channelList); $i++)
{
if ($channelList[$i]->selected)
{
array_splice($channelList, $i, 1);
$i--;
}
}
$yt->page = $channelList;
}
}; | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
namespace Rehike\Controller\ajax;
use Rehike\ControllerV2\RequestMetadata;
use Rehike\Network;
use Rehike\Async\Promise;
use Rehike\Util\Base64Url;
use Com\Youtube\Innertube\Request\EventReminderRequestParams;
use Com\Youtube\Innertube\Request\EventReminderRequestParams\UnknownThing;
/**
* Controller for the live event reminders AJAX.
*
* @author Aubrey Pankow <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends \Rehike\Controller\core\AjaxController {
public $useTemplate = false;
public function onPost(&$yt, $request) {
$action = self::findAction();
switch ($action) {
case "set_reminder":
$request = self::setReminder($request);
break;
case "remove_reminder":
$request = self::removeReminder($request);
break;
default:
self::error();
return;
}
$request->then(function ($ytdata) {
if (isset($ytdata->errors)) {
self::error();
} else {
http_response_code(200);
echo '{"response":"SUCCESS"}';
}
});
}
/**
* Set a live event reminder.
*/
private static function setReminder(RequestMetadata $request): Promise {
return new Promise(function ($resolve) use ($request) {
$params = new EventReminderRequestParams();
if (!isset($request->params->vid)) {
self::error();
}
$params->setVideoId($request->params->vid);
$thing = new UnknownThing();
$thing->setUnknownValue(0);
$thing->setUnknownValue2(0);
$params->setUnknownThing($thing);
Network::innertubeRequest(
action: "notification/add_upcoming_event_reminder",
body: [
"params" => Base64Url::encode($params->serializeToString())
]
)->then(function ($response) use ($resolve) {
$resolve( $response->getJson() );
});
});
}
/**
* Remove a live event reminder.
*/
private static function removeReminder(RequestMetadata $request): Promise {
return new Promise(function ($resolve) use ($request) {
$params = new EventReminderRequestParams();
if (!isset($request->params->vid)) {
self::error();
}
$params->setVideoId($request->params->vid);
$thing = new UnknownThing();
$thing->setUnknownValue(0);
$thing->setUnknownValue2(0);
$params->setUnknownThing($thing);
Network::innertubeRequest(
action: "notification/remove_upcoming_event_reminder",
body: [
"params" => Base64Url::encode($params->serializeToString())
]
)->then(function ($response) use ($resolve) {
$resolve( $response->getJson() );
});
});
}
};
| {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
use \Rehike\Controller\core\AjaxController;
use \Rehike\Network;
use \Rehike\Async\Promise;
use \Rehike\Model\Common\Subscription\MSubscriptionPreferencesOverlay;
/**
* Controller for subscription actions.
*
* This includes subscribing, unsubscribing, and getting subscription
* preferences.
*
* @author Aubrey Pankow <[email protected]>
* @author Taniko Yamamoto <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends AjaxController {
// These are used by the preferences overlay response.
public $useTemplate = false;
public $template = "";
public function onPost(&$yt, $request) {
$action = self::findAction();
switch ($action) {
case "create_subscription_to_channel":
$request = self::createSubscriptionToChannel();
break;
case "remove_subscriptions":
$request = self::removeSubscriptions();
break;
case "get_subscription_preferences_overlay":
$this->useTemplate = true;
$this->template =
"ajax/subscription/get_subscription_preferences_overlay"
;
self::getPreferencesOverlay($yt, $request);
return; // This takes control of everything from here.
default:
self::error();
break;
}
$request->then(function ($ytdata) {
if (is_null($ytdata)) self::error();
if (!isset($ytdata->error)) {
http_response_code(200);
echo json_encode((object) [
"response" => "SUCCESS"
]);
} else self::error();
});
}
/**
* Create a subscription to a channel.
     */
private static function createSubscriptionToChannel(): Promise {
return new Promise(function ($resolve) {
Network::innertubeRequest(
action: "subscription/subscribe",
body: [
"channelIds" => [
$_GET["c"] ?? null
],
"params" => $_POST["params"] ?? null
]
)->then(function ($response) use ($resolve) {
$resolve( $response->getJson() );
});
});
}
/**
* Remove a subscription from a channel.
     */
private static function removeSubscriptions(): Promise {
return new Promise(function ($resolve) {
Network::innertubeRequest(
action: "subscription/unsubscribe",
body: [
"channelIds" => [
$_GET["c"] ?? null
]
]
)->then(function ($response) use ($resolve) {
$resolve( $response->getJson() );
});
});
}
/**
* Get the subscription preferences overlay.
*
* @param object $yt Template data.
* @param RequestMetadata $request Request data.
*/
private static function getPreferencesOverlay(&$yt,
$request): void {
Network::innertubeRequest(
action: "browse",
body: [
"browseId" => $_POST["c"] ?? ""
]
)->then(function ($response) use ($yt) {
$ytdata = $response->getJson();
$header = $ytdata->header->c4TabbedHeaderRenderer ?? null;
$yt->page = new MSubscriptionPreferencesOverlay([
"title" => $header->title ?? "",
"options" => ($header
->subscribeButton
->subscribeButtonRenderer
->notificationPreferenceButton
->subscriptionNotificationToggleButtonRenderer
->command
->commandExecutorCommand
->commands[0]
->openPopupAction
->popup
->menuPopupRenderer
->items) ?? []
]);
});
}
}; | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
use \Rehike\Controller\core\AjaxController;
use \Rehike\Network;
use \Rehike\Model\Picker\PickerModel;
use \Rehike\Signin\API as SignIn;
use \Rehike\Signin\AuthManager;
use \Rehike\Signin\Cacher;
/**
* Controller for the account picker AJAX endpoint.
*
* @author Aubrey Pankow <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends AjaxController {
public $template = "ajax/picker";
public function onGet(&$yt, $request) {
$action = self::findAction();
Network::innertubeRequest(
action: "account/account_menu",
body: [
"deviceTheme" => "DEVICE_THEME_SUPPORTED",
"userInterfaceTheme" => "USER_INTERFACE_THEME_LIGHT"
]
)->then(function ($response) use ($yt, $action) {
$ytdata = $response->getJson();
$yt->page = PickerModel::bake($ytdata, $action);
});
}
}; | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
use Rehike\Async\Promise;
use \Rehike\Controller\core\AjaxController;
use \Rehike\Network;
use \Rehike\TemplateFunctions;
use \Rehike\Model\Share\ShareBoxModel;
use \Rehike\Model\Share\ShareEmbedModel;
use function Rehike\Async\async;
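/**
 * Controller for the share AJAX endpoints (share box and embed code dialog).
 */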
return new class extends AjaxController
{
private ?string $videoId;
private ?string $listId;
public function onGet(&$yt, $request)
{
$action = self::findAction();
if (is_null($action) || !isset($request->params->video_id)) self::error();
$this->videoId = $request->params->video_id;
        $this->listId = $request->params->list ?? null;
switch ($action)
{
case "get_share_box":
self::getShareBox($yt, $request);
break;
case "get_embed":
self::getEmbed($yt, $request);
break;
}
}
/**
* Get the share box.
*/
private function getShareBox(&$yt, $request): Promise/*<void>*/
{
return async(function() use (&$yt, $request) {
$this->template = "ajax/share/get_share_box";
$priInfo = yield self::videoInfo($this->videoId);
$yt->page = ShareBoxModel::bake(
videoId: $this->videoId,
title: TemplateFunctions::getText($priInfo->title),
listId: $this->listId
);
});
}
private function getEmbed(&$yt, $request): Promise/*<void>*/
{
return async(function() use (&$yt, $request) {
$this->template = "ajax/share/get_embed";
$priInfo = yield self::videoInfo($this->videoId);
$listData = null;
if ($this->listId)
{
$listData = yield Network::innertubeRequest(
action: "browse",
body: [
"browseId" => "VL" . $this->listId
]
);
}
$yt->page = ShareEmbedModel::bake(
videoId: $this->videoId,
title: TemplateFunctions::getText($priInfo->title),
listData: $listData
);
});
}
protected function videoInfo(string $videoId): Promise/*<object>*/
{
return async(function() use ($videoId) {
$response = yield Network::innertubeRequest(
action: "next",
body: [
"videoId" => $videoId
]
);
$ytdata = $response->getJson();
$results = $ytdata->contents->twoColumnWatchNextResults->results->results->contents ?? [];
for ($i = 0; $i < count($results); $i++)
{
if (isset($results[$i]->videoPrimaryInfoRenderer))
{
$priInfo = $results[$i]->videoPrimaryInfoRenderer;
}
}
if (!isset($priInfo))
{
http_response_code(400);
                echo "{\"errors\":[]}";
die();
}
return $priInfo;
});
}
}; | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
use \Rehike\Controller\core\AjaxController;
use \Rehike\Network;
/**
* Controller for playlist AJAX endpoints.
*
* @author Aubrey Pankow <[email protected]>
* @author Taniko Yamamoto <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends AjaxController {
public $useTemplate = false;
public function onPost(&$yt, $request) {
$action = self::findAction();
if ($action == "add_to_watch_later_list") {
self::validatePostVideoIds();
$videoId = $_POST["video_ids"];
self::addToPlaylist($videoId, "WL");
} else if ($action == "delete_from_watch_later_list") {
self::validatePostVideoIds();
$videoId = $_POST["video_ids"];
self::removeFromPlaylist($videoId, "WL");
} else if ($action == "add_to_playlist") {
self::validatePostVideoIds();
$videoId = $_POST["video_ids"];
$listId = $_POST["full_list_id"];
self::addToPlaylist($videoId, $listId);
// Because YouTube's own server is a bit weird, this
// might go too fast and break everything.
// Hence: very gross fix for a server-side bug
sleep(3);
} else if ($action == "delete_from_playlist") {
self::validatePostVideoIds();
$videoId = $_POST["video_ids"];
$listId = $_POST["full_list_id"];
self::removeFromPlaylist($videoId, $listId);
sleep(3);
} else if (isset($action)) {
http_response_code(400);
echo json_encode((object) [
"errors" => [
(object) [
"Illegal action $action."
]
]
]);
} else {
http_response_code(400);
echo json_encode((object) [
"errors" => [
(object) [
"Specify an action."
]
]
]);
}
}
/**
* Check if the request includes the POST form parameter for video_ids.
*
* If it isn't set, then it's an illegal request and this will reject the
* request.
*/
protected static function validatePostVideoIds(): void
{
if(!isset($_POST["video_ids"])) {
http_response_code(400);
echo json_encode((object) [
"errors" => [
(object) [
"Specify a video ID!"
]
]
]);
}
}
/**
* Add a video to a playlist.
*/
protected static function addToPlaylist(
string $videoId,
string $plId
): void
{
Network::innertubeRequest(
action: "browse/edit_playlist",
body: [
"playlistId" => $plId,
"actions" => [
(object) [
"addedVideoId" => $videoId,
"action" => "ACTION_ADD_VIDEO"
]
]
]
)->then(function ($response) {
$ytdata = $response->getJson();
            if (($ytdata->status ?? null) == "STATUS_SUCCEEDED") {
http_response_code(200);
echo json_encode((object) []);
} else {
http_response_code(400);
echo json_encode((object) [
"errors" => [
(object) [
"Failed to add video to playlist"
]
]
]);
}
});
}
/**
* Remove a video from a playlist.
*/
protected static function removeFromPlaylist(
string $videoId,
string $plId
): void
{
Network::innertubeRequest(
action: "browse/edit_playlist",
body: [
"playlistId" => $plId,
"actions" => [
(object) [
"removedVideoId" => $videoId,
"action" => "ACTION_REMOVE_VIDEO_BY_VIDEO_ID"
]
]
]
)->then(function ($response) {
$ytdata = $response->getJson();
            if (($ytdata->status ?? null) == "STATUS_SUCCEEDED") {
http_response_code(200);
echo json_encode((object) []);
} else {
http_response_code(400);
echo json_encode((object) [
"errors" => [
(object) [
"Failed to remove video from playlist"
]
]
]);
}
});
}
}; | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
namespace Rehike\Controller\ajax;
use Rehike\Controller\core\AjaxController;
use Rehike\i18n;
use Rehike\Network;
use Rehike\Model\AddTo\MAddTo as AddTo;
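/**
 * Controller for the add to playlist (addto) AJAX endpoint, which returns the
 * list of the signed-in user's playlists for the given video IDs.
 */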
return new class extends AjaxController {
public $useTemplate = true;
public $template = "ajax/addto";
public $contentType = "application/xml";
public function onGet(&$yt, $request) {
return $this->onPost($yt, $request);
}
public function onPost(&$yt, $request) {
i18n::newNamespace("addto")->registerFromFolder("i18n/addto");
// Because YouTube's own server is a bit weird, this
// might go too fast and break everything.
// Hence: very gross fix for a server-side bug
sleep(3);
Network::innertubeRequest(
action: "playlist/get_add_to_playlist",
body: [
"videoIds" => explode(",", $_POST["video_ids"]) ?? [""]
]
)->then(function ($response) use ($yt) {
$data = $response->getJson();
$lists = $data->contents[0]->addToPlaylistRenderer->playlists;
$yt->page->addto = new AddTo($lists);
});
}
}; | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
use \Rehike\Controller\core\AjaxController;
use \Rehike\Network;
/**
* Controller for the common service AJAX endpoint.
*
* This includes things like liking videos.
*
* @author Aubrey Pankow <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends AjaxController {
public $useTemplate = false;
public function onPost(&$yt, $request) {
if (!@$request->params->name) self::error();
$endpoint = $request->params->name;
switch ($endpoint) {
case "likeEndpoint":
self::likeEndpoint();
break;
default:
self::error();
break;
}
}
/**
* Like endpoint.
*/
private static function likeEndpoint() {
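        // The posted action is used directly as the InnerTube endpoint suffix
        // ("like/$action"); presumably "like", "dislike" or "removelike".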
$action = $_POST["action"];
$videoId = $_POST["id"];
Network::innertubeRequest(
action: "like/$action",
body: [
"target" => [
"videoId" => $videoId
]
]
)->then(function ($response) {
$ytdata = $response->getJson();
if (!@$ytdata->errors) {
http_response_code(200);
echo json_encode((object) [
"code" => "SUCCESS"
]);
die();
} else {
self::error();
}
});
}
}; | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
use \Rehike\Controller\core\AjaxController;
use \Rehike\Model\Comments\CommentThread;
use \Rehike\Network;
/**
* Controller for the primary comment service AJAX.
*
* @author Aubrey Pankow <[email protected]>
* @author Taniko Yamamoto <[email protected]>
* @author The Rehike Maintainers
*/
return new class extends AjaxController {
public function onPost(&$yt, $request) {
$action = self::findAction();
if (!@$action) self::error();
$yt->page = (object) [];
switch ($action) {
case "create_comment":
self::createComment($yt);
break;
case "create_comment_reply":
self::createCommentReply($yt);
break;
case "get_comments":
self::getComments($yt);
break;
case "get_comment_replies":
self::getCommentReplies($yt);
break;
case "perform_comment_action":
self::performCommentAction();
break;
}
}
/**
* Create a comment.
*
* @param $yt Template data.
*/
private function createComment(&$yt) {
$this->template = "ajax/comment_service/create_comment";
$content = $_POST["content"] ?? null;
$params = $_POST["params"] ?? null;
// Reject invalid arguments.
        if (is_null($content) || is_null($params))
{
self::error();
}
Network::innertubeRequest(
action: "comment/create_comment",
body: [
"commentText" => $_POST["content"],
"createCommentParams" => $_POST["params"]
]
)->then(function ($response) use ($yt) {
$ytdata = $response->getJson();
            $data = $ytdata->actions[1]->createCommentAction->contents
                ->commentThreadRenderer ?? null;
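            // Collect the browse (channel) IDs of the comment author and of any
            // channels mentioned in the comment text so that their Data API
            // information can be resolved before the new thread is rendered.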
$cids = [];
$cids[] = $data->comment->commentRenderer->authorEndpoint->browseEndpoint->browseId;
foreach ($data->comment->commentRenderer->contentText->runs as $run)
{
if ($a = @$run->navigationEndpoint->browseEndpoint->browseId)
{
if (!in_array($a, $cids))
$cids[] = $a;
}
}
CommentThread::populateDataApiData($cids)
->then(function() use (&$yt, $data) {
if (null != $data)
{
$yt->page = CommentThread::commentThreadRenderer($data);
}
else
{
self::error();
}
});
});
}
/**
* Create a reply to a comment.
*
* @param $yt Template data.
*/
private function createCommentReply(&$yt) {
$this->template = "ajax/comment_service/create_comment_reply";
$content = $_POST["content"] ?? null;
$params = $_POST["params"] ?? null;
// Reject invalid arguments.
        if (is_null($content) || is_null($params))
{
self::error();
}
Network::innertubeRequest(
action: "comment/create_comment_reply",
body: [
"commentText" => $_POST["content"],
"createReplyParams" => $_POST["params"]
]
)->then(function ($response) use ($yt) {
$ytdata = $response->getJson();
            $data = $ytdata->actions[1]->createCommentReplyAction
                ->contents->commentRenderer ?? null;
$cids = [];
$cids[] = $data->authorEndpoint->browseEndpoint->browseId;
foreach ($data->contentText->runs as $run)
{
if ($a = @$run->navigationEndpoint->browseEndpoint->browseId)
{
if (!in_array($a, $cids))
$cids[] = $a;
}
}
CommentThread::populateDataApiData($cids)
->then(function() use (&$yt, $data) {
if (null != $data)
{
$yt->page = CommentThread::commentRenderer($data, true);
}
else
{
self::error();
}
});
});
}
/**
* Get comments for continuation or
* reload (for changing sort).
*
* @param $yt Template data.
*/
private function getComments(&$yt) {
$this->template = "ajax/comment_service/get_comments";
$ctoken = $_POST["page_token"] ?? null;
if(!@$ctoken) self::error();
Network::innertubeRequest(
action: "next",
body: [
"continuation" => $_POST["page_token"]
]
)->then(function ($response) use ($yt) {
$ytdata = $response->getJson();
            $data = null;
            foreach ($ytdata->onResponseReceivedEndpoints as $endpoint)
            {
                if ($a = $endpoint->appendContinuationItemsAction ?? null)
                {
                    $data = $a;
                }
                else if ($a = $endpoint->reloadContinuationItemsCommand ?? null)
                {
                    $data = $a;
                }
            }
if (!is_null($data))
{
CommentThread::bakeComments($data)->then(function ($response)
use ($yt)
{
$yt->page = $response;
});
}
else
{
self::error();
}
});
}
/**
* Get comment replies.
*
* @param $yt Template data.
*/
private function getCommentReplies(&$yt) {
$this->template = "ajax/comment_service/get_comment_replies";
$ctoken = $_POST["page_token"] ?? null;
if (!@$ctoken) self::error();
Network::innertubeRequest(
action: "next",
body: [
"continuation" => $_POST["page_token"]
]
)->then(function ($response) use ($yt) {
$ytdata = $response->getJson();
            $data = null;
            foreach ($ytdata->onResponseReceivedEndpoints as $endpoint) {
                if ($a = $endpoint->appendContinuationItemsAction ?? null) {
                    $data = $a;
                }
            }
if (!is_null($data))
{
CommentThread::bakeReplies($data)->then(function ($response)
use ($yt)
{
$yt->page = $response;
});
}
else
{
self::error();
}
});
}
/**
* Perform a comment action
* (Like, dislike, heart, etc.)
*/
private function performCommentAction() {
$this->useTemplate = false;
Network::innertubeRequest(
action: "comment/perform_comment_action",
body: [
"actions" => [
$_POST["action"]
]
]
)->then(function ($response) {
$ytdata = $response->getJson();
            if (@$ytdata->actionResults[0]->status == "STATUS_SUCCEEDED") {
echo json_encode((object) [
"response" => "SUCCESS"
]);
} else {
self::error();
}
});
}
}; | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
namespace Rehike\Controller\Special;
use function Rehike\Async\async;
use Rehike\SimpleFunnel;
use Rehike\SimpleFunnelResponse;
use YukisCoffee\CoffeeRequest\Network\Response;
use Rehike\Controller\core\HitchhikerController;
return new class extends HitchhikerController
{
public const YTCFG_REGEX = "/ytcfg\.set\(({.*?})\);/";
public $useTemplate = false;
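    /**
     * Funnels the upstream live chat page, rewrites its ytcfg to force the
     * light theme and to disable newer UI experiment flags, then outputs the
     * modified HTML to the client.
     */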
public function onGet(&$yt, $request)
{
return async(function() use (&$yt, &$request) {
$chatData = yield SimpleFunnel::funnelCurrentPage();
$chatHtml = $chatData->getText();
$matches = [];
preg_match(self::YTCFG_REGEX, $chatHtml, $matches);
if (!isset($matches[1]))
self::error("Could not find ytcfg");
$ytcfg = json_decode($matches[1]);
// Store the original ytcfg to replace in the HTML
$oytcfg = $matches[1];
if (is_null($ytcfg))
self::error("Could not decode ytcfg");
// Force light mode
$ytcfg->LIVE_CHAT_ALLOW_DARK_MODE = false;
// Configure experiment flags to disable
// new icons and the color update
if (!is_object($ytcfg->EXPERIMENT_FLAGS))
{
$ytcfg->EXPERIMENT_FLAGS = (object) [];
}
$exps = &$ytcfg->EXPERIMENT_FLAGS;
$exps->kevlar_system_icons = false;
$exps->web_darker_dark_theme = false;
$exps->kevlar_watch_color_update = false;
$exps->web_sheets_ui_refresh = false;
$chatHtml = str_replace($oytcfg, json_encode($ytcfg), $chatHtml);
            // Casting $chatData->headers with (array) would not produce the plain
            // name => value array that the Response constructor expects (it does
            // not accept a ResponseHeaders object), so convert it manually.
$headers = [];
foreach ($chatData->headers as $name => $value) $headers[$name] = $value;
SimpleFunnelResponse::fromResponse(
new Response(
$chatData->sourceRequest,
$chatData->status,
$chatHtml,
$headers
)
)->output();
});
}
public static function error(string $msg): void
{
http_response_code(400);
echo "[Rehike] Fatal error while attempting to load live chat: $msg";
die();
}
}; | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
use Rehike\RehikeConfigManager as ConfigManager;
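/**
 * Controller for saving Rehike configuration options.
 *
 * Expects a JSON request body mapping option names to values; each option is
 * written through the config manager and the config is then dumped to disk.
 */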
return new class extends \Rehike\Controller\core\AjaxController {
public $useTemplate = false;
public function onPost(&$yt, $request) {
$input = \json_decode(\file_get_contents('php://input'), true);
try {
foreach ($input as $option => $value) {
ConfigManager::setConfigProp(
$option,
$value
);
}
ConfigManager::dumpConfig();
} catch(Throwable $e) {
http_response_code(400);
}
}
}; | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
namespace Rehike\Controller\rehike;
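/**
 * Serves files from the static/ directory.
 *
 * The first two segments of the request path (presumably the controller
 * prefix) are skipped; the remainder is resolved relative to static/.
 */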
class StaticRouter {
public function get(&$yt, &$template, $request) {
$filename = "static/";
for ($i = 2; $i < count($request->path); $i++) {
if ($i == count($request->path) - 1) {
$filename .= $request->path[$i];
} else {
$filename .= $request->path[$i] . "/";
}
}
if (file_exists($filename)) {
header("Content-Type: " . mime_content_type($filename));
echo file_get_contents($filename);
exit();
} else {
http_response_code(404);
}
}
public function post(&$yt, &$template, $request) {
return $this->get($yt, $template, $request);
}
}
return new StaticRouter(); | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
namespace Rehike\Controller\Version;
use Rehike\Controller\core\HitchhikerController;
use Rehike\Version\VersionController;
use Rehike\i18n;
use Rehike\Model\Rehike\Version\MVersionPage;
/**
* Use remote Git repo if possible.
*
 * Each successful response is cached for the next 60 minutes. This is
 * to avoid hitting the rate limit for unauthenticated GitHub REST API requests.
*/
class RemoteGit
{
/**
* Attempt to retrieve remote GitHub information, either
* through the GitHub API or a cached earlier download.
*
* @param string $branch
* @return object|false
*/
public static function getInfo($branch)
{
if (!GetVersionController::GH_ENABLED) return false; // Return false if GH access is not permitted.
return self::useCache($branch) ?? self::useRemote($branch) ?? false;
}
/**
* @return object|null
*/
private static function useCache($branch)
{
$response = null;
$lockFile = @file_get_contents("cache/remote-git.lock");
$lockJson = @json_decode($lockFile);
// Validate the lock file
if ($lockFile && null != $lockJson && ($branch = @$lockJson->{$branch}))
{
// Check if the cache has expired
if (@$branch->time > time() && @$branch->file)
{
                // Attempt to read the cache file
$cacheFile = @file_get_contents($branch->file);
$cacheJson = @json_decode($cacheFile);
// Validate the cache and return it if it's valid
if ($cacheFile && null != $cacheJson)
{
return $cacheJson;
}
}
}
return $response;
}
/**
* Store a cache file and reuse it later
*
* @param string $branch
* @param string $encodedJson
* @return bool status (false on failure, true on success)
*/
private static function storeCache($branch, $encodedJson)
{
$newLockContents = (object)[];
$filename = "remote-git-{$branch}.json";
// Grab the lock file contents (if they exist)
$lockFile = @file_get_contents("cache/remote-git.lock");
$lockJson = @json_decode($lockFile);
if ($lockFile && null != $lockJson)
{
$newLockContents = &$lockJson;
}
// Store the encoded json in a file corresponding to the
// specific branch.
if (!is_dir("cache")) mkdir("cache");
        $fileFailure = false;
        // fopen/fwrite/fclose return false (not null) on failure, so check
        // explicitly rather than relying on the null-coalescing operator.
        $fh = @fopen("cache/{$filename}", "w");
        if (false === $fh || false === @fwrite($fh, $encodedJson) || false === @fclose($fh))
            $fileFailure = true;
// Update the lock file
$newLockContents->{$branch} = (object)[
"time" => time() + 60 * 60,
"file" => "cache/{$filename}"
];
$fh = @fopen("cache/remote-git.lock", "w") ?? ($fileFailure = true);
@fwrite($fh, json_encode($newLockContents)) ?? ($fileFailure = true);
@fclose($fh) ?? ($fileFailure = true);
return !$fileFailure;
}
/**
* @return object|null
*/
private static function useRemote($branch)
{
$ch = curl_init(GetVersionController::GH_API_COMMITS . $branch);
curl_setopt_array($ch, [
CURLOPT_RETURNTRANSFER => true,
CURLOPT_USERAGENT => "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:99.0) Gecko/20100101 Firefox/99.0"
]);
// Attempt to request the GitHub API
$data = @curl_exec($ch);
// Validate the response (if cURL failed, it will be false)
if (!$data) return null;
if (200 != curl_getinfo($ch, CURLINFO_RESPONSE_CODE)) return null;
// If the response is JSON, attempt to decode it
$json = @json_decode($data);
if (null == $json) return null;
// Attempt to write a cache file.
self::storeCache($branch, $data)
|| trigger_error("Failed to cache Git result.", E_USER_WARNING);
// Return the remote object.
return $json;
}
}
class GetVersionController extends HitchhikerController
{
public const GH_REPO = "Rehike/Rehike";
public const GH_ENABLED = true;
public const GH_API_COMMITS = "https://api.github.com/repos/" . self::GH_REPO . "/commits?sha="; // sha= + branch
/**
* Reference to Rehike\Version\VersionController::$versionInfo
*/
public static $versionInfo;
public $template = "rehike/version";
public function onGet(&$yt, $request)
{
i18n::newNamespace("rehike/version")->registerFromFolder("i18n/rehike/version");
$yt->page = (object)self::bake();
}
public static function bake()
{
self::$versionInfo = &VersionController::$versionInfo;
$initStatus = VersionController::init();
if (false == $initStatus)
{
return new MVersionPage(null);
}
// If remote git is expected, report it
if (self::GH_ENABLED) self::$versionInfo += ["expectRemoteGit" => true];
// ...and attempt to use it
if (
@self::$versionInfo["branch"] &&
( $rg = RemoteGit::getInfo(self::$versionInfo["branch"]) )
)
{
self::$versionInfo += ["remoteGit" => $rg];
// If the previous commit is reported, but not the current commit,
// attempt to retrieve the current commit hash from git.
if (@self::$versionInfo["previousHash"] && !@self::$versionInfo["currentHash"])
for ($i = 1, $l = count($rg); $i < $l; $i++)
{
$currentItem = &$rg[$i];
$previousItem = &$rg[$i - 1];
if (self::$versionInfo["previousHash"] == @$currentItem->sha)
{
// Previous item must be the current hash
// so use its hash
self::$versionInfo += ["currentHash" => $previousItem->sha];
}
}
}
self::$versionInfo += ["semanticVersion" => VersionController::getVersion()];
// Return a version page model.
return new MVersionPage(self::$versionInfo);
}
}
// export
return new GetVersionController(); | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
<?php
use Rehike\Model\Rehike\Config\ConfigModel;
return new class extends \Rehike\Controller\core\NirvanaController {
public $template = "rehike/config/main";
const DEFAULT_TAB = "appearance";
const VALID_TABS = [
"appearance",
"experiments",
"advanced"
];
public function onGet(&$yt, $request) {
$tab = $request->path[2] ?? self::DEFAULT_TAB;
if (!in_array($tab, self::VALID_TABS)) {
header("Location: /rehike/config/" . self::DEFAULT_TAB);
}
$yt->page = ConfigModel::bake($tab, $request->params->status ?? null);
}
}; | {
"repo_name": "Rehike/Rehike",
"stars": "81",
"repo_language": "PHP",
"file_name": "config.php",
"mime_type": "text/x-php"
} |
CC0 1.0 Universal
Statement of Purpose
The laws of most jurisdictions throughout the world automatically confer exclusive Copyright and Related Rights (defined below) upon the creator and subsequent owner(s) (each and all, an "owner") of an original work of authorship and/or a database (each, a "Work").
Certain owners wish to permanently relinquish those rights to a Work for the purpose of contributing to a commons of creative, cultural and scientific works ("Commons") that the public can reliably and without fear of later claims of infringement build upon, modify, incorporate in other works, reuse and redistribute as freely as possible in any form whatsoever and for any purposes, including without limitation commercial purposes. These owners may contribute to the Commons to promote the ideal of a free culture and the further production of creative, cultural and scientific works, or to gain reputation or greater distribution for their Work in part through the use and efforts of others.
For these and/or other purposes and motivations, and without any expectation of additional consideration or compensation, the person associating CC0 with a Work (the "Affirmer"), to the extent that he or she is an owner of Copyright and Related Rights in the Work, voluntarily elects to apply CC0 to the Work and publicly distribute the Work under its terms, with knowledge of his or her Copyright and Related Rights in the Work and the meaning and intended legal effect of CC0 on those rights.
Copyright and Related Rights. A Work made available under CC0 may be protected by copyright and related or neighboring rights ("Copyright and Related Rights"). Copyright and Related Rights include, but are not limited to, the following:
i. the right to reproduce, adapt, distribute, perform, display, communicate, and translate a Work;
ii. moral rights retained by the original author(s) and/or performer(s);
iii. publicity and privacy rights pertaining to a person's image or likeness depicted in a Work;
iv. rights protecting against unfair competition in regards to a Work, subject to the limitations in paragraph 4(a), below;
v. rights protecting the extraction, dissemination, use and reuse of data in a Work;
vi. database rights (such as those arising under Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, and under any national implementation thereof, including any amended or successor version of such directive); and
vii. other similar, equivalent or corresponding rights throughout the world based on applicable law or treaty, and any national implementations thereof.
Waiver. To the greatest extent permitted by, but not in contravention of, applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and unconditionally waives, abandons, and surrenders all of Affirmer's Copyright and Related Rights and associated claims and causes of action, whether now known or unknown (including existing as well as future claims and causes of action), in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each member of the public at large and to the detriment of Affirmer's heirs and successors, fully intending that such Waiver shall not be subject to revocation, rescission, cancellation, termination, or any other legal or equitable action to disrupt the quiet enjoyment of the Work by the public as contemplated by Affirmer's express Statement of Purpose.
Public License Fallback. Should any part of the Waiver for any reason be judged legally invalid or ineffective under applicable law, then the Waiver shall be preserved to the maximum extent permitted taking into account Affirmer's express Statement of Purpose. In addition, to the extent the Waiver is so judged Affirmer hereby grants to each affected person a royalty-free, non transferable, non sublicensable, non exclusive, irrevocable and unconditional license to exercise Affirmer's Copyright and Related Rights in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "License"). The License shall be deemed effective as of the date CC0 was applied by Affirmer to the Work. Should any part of the License for any reason be judged legally invalid or ineffective under applicable law, such partial invalidity or ineffectiveness shall not invalidate the remainder of the License, and in such case Affirmer hereby affirms that he or she will not (i) exercise any of his or her remaining Copyright and Related Rights in the Work or (ii) assert any associated claims and causes of action with respect to the Work, in either case contrary to Affirmer's express Statement of Purpose.
Limitations and Disclaimers.
a. No trademark or patent rights held by Affirmer are waived, abandoned, surrendered, licensed or otherwise affected by this document.
b. Affirmer offers the Work as-is and makes no representations or warranties of any kind concerning the Work, express, implied, statutory or otherwise, including without limitation warranties of title, merchantability, fitness for a particular purpose, non infringement, or the absence of latent or other defects, accuracy, or the present or absence of errors, whether or not discoverable, all to the greatest extent permissible under applicable law.
c. Affirmer disclaims responsibility for clearing rights of other persons that may apply to the Work or any use thereof, including without limitation any person's Copyright and Related Rights in the Work. Further, Affirmer disclaims responsibility for obtaining any necessary consents, permissions or other rights required for any use of the Work.
d. Affirmer understands and acknowledges that Creative Commons is not a party to this document and has no duty or obligation with respect to this CC0 or use of the Work.
For more information, please see https://creativecommons.org/publicdomain/zero/1.0
| {
"repo_name": "joewdavies/awesome-frontend-gis",
"stars": "159",
"repo_language": "None",
"file_name": "lint.yml",
"mime_type": "text/plain"
} |
# Contribution Guidelines
## Pull Requests
ALWAYS create a new branch with your proposed changes.
Thank you!
## Adding a new Item
- Try to fit your item into an existing section.
Or suggest a new section and explain why your item does not fit into an existing one and why the new section would be a useful addition.
- Add a new item to the bottom of the list in a section.
- If a duplicate item exists, discuss why the new item should replace it.
- Check your spelling & grammar.
- The item must follow this format:
```
- [item name](https link) - Description beginning with capital, ending in period.
```
| {
"repo_name": "joewdavies/awesome-frontend-gis",
"stars": "159",
"repo_language": "None",
"file_name": "lint.yml",
"mime_type": "text/plain"
} |
<!--lint disable double-link -->
<div align="center">
<h2>Awesome Frontend GIS <a href="https://github.com/sindresorhus/awesome">
<img src="https://cdn.rawgit.com/sindresorhus/awesome/d7305f38d29fed78fa85652e3a63e154dd8e8829/media/badge.svg" alt="Awesome" href="https://github.com/sindresorhus/awesome">
</a></h2>
Geographic Information Systems (GIS) for web browsers. For managing, analyzing, editing, and visualizing geographic data.
<div>
<a href="https://github.com/eurostat/gridviz" target="_blank">
<img src='https://user-images.githubusercontent.com/25485293/191950255-cbd83c6a-4880-4c0a-a665-b59a21467702.PNG'>
</a>
</div>
*A compilation of geospatial-related web frameworks, tools, demos, applications, data sources and more.*
</div>
## Contents
- [👨💻 **JavaScript Libraries**](#-javascript-libraries)
- [Mapping](#mapping)
- [Data Processing](#data-processing)
- [LiDAR](#lidar)
- [Remote Sensing](#remote-sensing)
- [💾 **Data sources**](#-data-sources)
- [Downloads](#downloads)
- [Web APIs](#web-apis)
- [Collections](#collections)
- [📒 **Notebooks**](#-notebooks)
- [Beginner](#beginner)
- [Intermediate](#intermediate)
- [Advanced](#advanced)
- [🗺️ **Web maps**](#world_map-web-maps)
- [🌐 **Web apps**](#-web-apps)
- [🎨 **Colour advice**](#-colour-advice)
- [📍 **Icons**](#-icons)
- [📺 **Videos**](#-videos)
- [📚 **Further reading**](#-further-reading)
## 👨💻 JavaScript Libraries
### Mapping
Libraries for creating web maps:
- [Leaflet](https://leafletjs.com/) - The leading open-source JavaScript library for mobile-friendly interactive maps.
- [OpenLayers](https://openlayers.org/) - A high-performance, feature-packed library for creating interactive maps on the web.
- [Cesium.js](https://cesiumjs.org/) - An open-source JavaScript library for world-class 3D mapping of geospatial data.
- [maplibre](https://github.com/maplibre/maplibre-gl-js) - It originated as an open-source fork of mapbox-gl-js, before their switch to a non-OSS license in December 2020.
- [Deck.GL](https://github.com/uber/deck.gl) - WebGL2 powered geospatial visualization layers.
- [MapTalks.js](https://github.com/maptalks/maptalks.js) - An open-source JavaScript library for integrated 2D/3D maps.
- [antvis L7](https://github.com/antvis/L7) - Large-scale WebGL-powered Geospatial Data Visualization.
- [Tangram](https://github.com/tangrams/tangram) - WebGL map rendering engine for creative cartography.
- [TerriaJS](https://github.com/TerriaJS/terriajs) - TerriaJS is a library for building rich, web-based geospatial data explorers.
- [gridviz](https://github.com/eurostat/gridviz) - A package for visualizing gridded data.
- [Eurostat-map](https://github.com/eurostat/eurostat-map.js) - Create and customise web maps showing Eurostat data using D3.js.
- [Bertin.js](https://github.com/neocarto/bertin) - A JavaScript library for visualizing geospatial data and making thematic maps for the web.
- [regl-map-animation](https://github.com/eurostat/regl-map-animation) - Animate x/y point data using regl and categorize them into a bar chart.
- [iTowns](https://github.com/iTowns/itowns) - A Three.js-based framework written in JavaScript/WebGL for visualizing 3D geospatial data.
- [globe.gl](https://globe.gl/) - This library is a convenience wrapper around the three-globe plugin, and uses ThreeJS/WebGL for 3D rendering.
- [d3-geo](https://github.com/d3/d3-geo) - A library for creating maps based on D3.js.
- [d3-geo-projection](https://github.com/d3/d3-geo-projection) - Extended geographic projections.
- [d3-geo-voronoi](https://github.com/Fil/d3-geo-voronoi) - Voronoi diagrams and Delaunay triangulation for the sphere.
- [d3-inertia](https://github.com/Fil/d3-inertia) - An extension to d3-drag that continues the mouse movement with some inertia.
- [datamaps](https://github.com/markmarkoh/datamaps) - Customizable map visualizations in one file.
- [react-simple-maps](https://github.com/zcreativelabs/react-simple-maps) - An SVG mapping component library for React, built on top of d3-geo.
- [Google Maps](https://developers.google.com/maps/documentation/javascript) - Google Maps API for JavaScript.
- [Wrld.js](https://github.com/wrld3d/wrld.js/) - Animated 3D city maps based on Leaflet.
- [Mapbox GL JS](https://docs.mapbox.com/mapbox-gl-js/examples/) - JavaScript library that uses WebGL to render interactive maps from vector tiles.
- [ArcGIS API for JS](https://developers.arcgis.com/JavaScript/latest/release-notes/) - A lightweight way to embed maps and tasks in web applications.
- [HERE maps API](https://developer.here.com/develop/javascript-api) - Build web applications with feature-rich and customizable HERE maps.
- [Map Forecast API](https://github.com/windycom/API) - Simple-to-use library based on Leaflet 1.4.x. It allows you to show wind maps.
### Data Processing
Libraries that help you analyse and process geospatial data:
- [geolib](https://github.com/manuelbieh/geolib) - Library to provide basic geospatial operations like distance calculation, conversion of decimal coordinates to sexagesimal, etc.
- [Turf.js](https://github.com/Turfjs/turf) - Turf is a JavaScript library for spatial analysis.
- [JSTS](https://github.com/bjornharrtell/jsts) - JavaScript Topology Suite.
- [flatten-js](https://github.com/alexbol99/flatten-js) - For manipulating geometrical shapes, finding intersections, checking inclusion, calculating distance, transformations and more.
- [flatbush](https://github.com/mourner/flatbush) - A really fast static spatial index for 2D points and rectangles in JavaScript.
- [rbush](https://github.com/mourner/rbush) - RBush is a high-performance JavaScript library for 2D spatial indexing of points and rectangles.
- [Geometric.js](https://github.com/HarryStevens/geometric) - A JavaScript library for doing geometry.
- [Euclid.ts](https://github.com/mathigon/euclid.js) - 2D Euclidean geometry classes, utilities, and drawing tools.
- [Proj4js](https://github.com/proj4js/proj4js) - Transform coordinates from one coordinate system to another, including datum transformations.
- [GeoTiff.js](https://github.com/geotiffjs/geotiff.js) - Parse TIFF files for visualization or analysis.
- [Arc.js](https://github.com/springmeyer/arc.js) - Calculate great circles routes as lines in GeoJSON or WKT format.
- [awesome-GeoJSON](https://github.com/tmcw/awesome-geojson) - Catalogue of GeoJSON tools.
- [topoJSON](https://github.com/topojson/topojson) - Convert GeoJSON to TopoJSON for use in D3 maps.
- [d3-geo-polygon](https://github.com/d3/d3-geo-polygon) - Clipping and geometric operations for spherical polygons.
- [Wicket](https://github.com/arthur-e/Wicket) - A modest library for moving between Well-Known Text (WKT) and various framework geometries.
- [koop](https://github.com/koopjs/koop) - Koop is a JavaScript toolkit for connecting incompatible spatial APIs.
- [spl.js](https://github.com/jvail/spl.js) - Makes it possible to use SpatiaLite functionality in JavaScript. Behind the scenes, a WebAssembly port of SpatiaLite is used.
- [geotoolbox](https://github.com/neocarto/geotoolbox) - Provides several GIS operations for use with geojson properties. Useful for thematic cartography.
- [supercluster](https://www.npmjs.com/package/supercluster) - A very fast JavaScript library for geospatial point clustering for browsers and Node.
- [geoblaze](https://github.com/GeoTIFF/geoblaze) - A blazing fast JavaScript raster processing engine. Using geoblaze, you can run computations ranging from basic statistics (min, max, mean, mode) to band arithmetic and histogram generation in either a web browser or a node application.
- [geopackage-js](https://github.com/ngageoint/geopackage-js) - The GeoPackage JavaScript library currently provides the ability to read GeoPackage files.
- [geojson-merge](https://github.com/mapbox/geojson-merge) - Merge multiple GeoJSON files into one FeatureCollection.
- [Galton](https://github.com/urbica/galton) - Lightweight Node.js isochrone server. Build isochrones using OSRM, Turf and concaveman.
- [geojson-vt](https://github.com/mapbox/geojson-vt) - A highly efficient JavaScript library for slicing GeoJSON data into vector tiles on the fly.
- [geobuf](https://github.com/mapbox/geobuf) - Geobuf is a compact binary encoding for geographic data.
- [geoparquet](https://github.com/opengeospatial/geoparquet) - Encoding geospatial data in Apache Parquet.
- [statsbreaks](https://github.com/riatelab/statsbreaks) - Split (classify/discretize) a quantitative dataset into a (k) number of classes or thematic categories. Useful for creating a choropleth map.
- [gdal3.js](https://github.com/bugra9/gdal3.js) - Convert raster and vector geospatial data to various formats and coordinate systems entirely in the browser.
- [math.gl](https://github.com/uber-web/math.gl) - JavaScript math library focused on Geospatial and 3D use cases.
### LiDAR
Tools for visualizing point clouds with web browsers:
- [Potree](https://github.com/potree/potree) - WebGL point cloud viewer for large datasets.
- [Plasio](https://github.com/verma/plasio) - Drag-n-drop In-browser LAS/LAZ point cloud viewer.
- [Potree & Cesium.js](http://potree.org/potree/examples/cesium_retz.html) - Retz, Austria LiDAR viewer.
- [Three.js](https://threejs.org/examples/#webgl_loader_pcd) - Point cloud data loader.
### Remote Sensing
Resources for frontend earth observation and remote sensing:
- [Google Earth Engine](https://developers.google.com/earth-engine/tutorials/tutorial_api_01) - Geospatial processing service.
- [sentinelhub-js](https://github.com/sentinel-hub/sentinelhub-js/) - Download and process satellite imagery in JavaScript or TypeScript using Sentinel Hub services.
- [Sentinel Hub custom scripts](https://github.com/sentinel-hub/custom-scripts) - A repository of custom scripts to be used with Sentinel Hub.
- [Spectral](https://github.com/awesome-spectral-indices) - Awesome Spectral Indices for the Google Earth Engine JavaScript API (Code Editor).
- [EOSDIS Worldview](https://github.com/nasa-gibs/worldview) - Interactive interface for browsing global, full-resolution satellite imagery.
## 💾 Data sources
A collection of geospatial open data sources:
### Downloads
Data available for download:
- [OpenMapTiles](https://openmaptiles.org/) - Free OpenStreetMap Vector Tiles.
- [OpenStreetMap](https://www.geofabrik.de/data/download.html) - A free, world-wide geographic data set.
- [Natural Earth](https://www.naturalearthdata.com/) - Free vector and raster map data at 1:10m, 1:50m, and 1:110m scales.
- [World Atlas TopoJSON](https://github.com/topojson/world-atlas) - A convenient redistribution of Natural Earth's vector data as TopoJSON.
- [Copernicus global DEM](https://ec.europa.eu/eurostat/web/gisco/geodata/reference-data/elevation/copernicus-dem/elevation) - Global elevation tiles.
- [ETOPO1](https://www.ngdc.noaa.gov/mgg/global/) - 1 arc-minute global relief model of Earth's surface that integrates land topography and ocean bathymetry.
- [HydroSHEDS](https://www.hydrosheds.org/) - Hydrographic information in a consistent and comprehensive format for regional and global-scale applications.
- [geoboundaries](https://www.geoboundaries.org/) - The world's largest open, free and research-ready database of political administrative boundaries.
- [Global power plant database](https://datasets.wri.org/dataset/globalpowerplantdatabase) - A comprehensive, global, open source database of power plants.
- [Ookla internet speed data](https://github.com/teamookla/ookla-open-data) - Provides global network performance metrics. Data is provided in both Shapefile format as well as Apache Parquet.
- [European population grids - GISCO](https://ec.europa.eu/eurostat/web/gisco/geodata/reference-data/grids) - These datasets contain grid cells covering the European land territory, for various resolutions from 1km to 100km. Base statistics such as population figures are provided for these cells.
- [Healthcare Services in Europe](https://ec.europa.eu/eurostat/web/gisco/geodata/reference-data/healthcare-services) - The locations of healthcare services across Europe.
- [European Postcodes Point Data](https://ec.europa.eu/eurostat/web/gisco/geodata/reference-data/postal-codes) - The postal code point dataset shows the location of postal codes, NUTS codes and the Degree of Urbanisation classification across the EU, EFTA and candidate countries from a variety of sources.
- [WorldPop](https://www.worldpop.org/) - Open access spatial demographic datasets built using transparent approaches.
- [World Bank](https://data.worldbank.org/) - Free and open access to global development data.
- [USGS Earth Explorer](https://earthexplorer.usgs.gov/) - Query and order satellite images, aerial photographs, and cartographic products through the U.S. Geological Survey.
- [Open Topography](https://opentopography.org/) - OpenTopography facilitates community access to high-resolution, Earth science-oriented, topography data, and related tools and resources.
- [NASA Earth Observations](https://neo.gsfc.nasa.gov/) - Here you can browse and download imagery of satellite data from NASA's constellation of Earth Observing System satellites.
- [Geodata.gov.gr](https://geodata.gov.gr/en/dataset) - Open geospatial data for Greece.
- [ArcGIS Hub](https://hub.arcgis.com/) - Over 380,000 open datasets.
- [DIVA-GIS](https://www.diva-gis.org/Data) - Free spatial data. Includes country and global level boundaries, climate, species occurrence, crop and elevation data.
- [OpenAerialMap](https://openaerialmap.org/) - An open service to provide access to a commons of openly licensed imagery and map layer services.
- [Global Climate Monitor](https://www.globalclimatemonitor.org/) - Global open climate data available for download.
- [Copernicus open access hub](https://scihub.copernicus.eu) - Copernicus satellite imagery download.
### Web APIs
Restful APIs for consuming geospatial data on the fly:
- [GISCO data distribution API](https://gisco-services.ec.europa.eu/distribution/v2/) - Official European Commission data source for administrative regions, Coastal lines, Communes, Countries, Local Administrative Units, NUTS, Urban Audit.
- [Address API](https://gisco-services.ec.europa.eu/addressapi/docs/) - Pan-european address data. Supports geocoding, reverse-geocoding and lists of address components (e.g. all roads in a city).
- [USGS earthquake data](https://earthquake.usgs.gov/fdsnws/event/1/) - Allows custom searches for earthquake information using a variety of parameters.
- [movebank-api](https://github.com/movebank/movebank-api-doc) - Movebank is a free, online database and research platform for animal tracking and other on-animal sensor data.
- [Overpass API](https://wiki.openstreetmap.org/wiki/Overpass_API) - Retrieve OpenStreetMap data.
- [REST countries](https://restcountries.com/) - Get information about countries via a RESTful API.
- [OSMNames](https://osmnames.org/api/) - The OSMNames open-source project provides raw place data in an easy-to-index form.
- [Open Notify](http://open-notify.org/Open-Notify-API/) - Get the current location of the International Space Station (ISS) and current number of people in space!
- [OpenAQ](https://docs.openaq.org/docs) - OpenAQ is the largest open-source air quality data platform.
- [openrouteservice](https://openrouteservice.org/dev/#/api-docs) - Directions, Isochrones, Time-Distance Matrix, Pelias Geocoding, POIs, Elevation, Optimization.
- [GraphHopper Route Optimization API](https://www.graphhopper.com/route-optimization/) - Solves a variety of vehicle routing problems, including the classical “traveling salesman problem”.
- [Geoapify](https://apidocs.geoapify.com/) - Maps, address and location search, route optimization, reachability analysis, geodata access, and more.
- [OpenCage](https://opencagedata.com/api) - Forward and reverse worldwide geocoding API using open data.
- [breezometer](https://docs.breezometer.com/api-documentation/introduction/) - API endpoints for Air Quality, Pollen, Weather, Wildfire, Cleanest Routes and Environmental Alerts. Serves current conditions, forecasts, and historical data.
- [IQAir](https://www.iqair.com/air-pollution-data-api) - Air quality API. Global, historical, real-time and forecast air quality data.
- [ipfind](https://ipfind.io/) - Geographic location of an IP address or any domain name along with some other useful information.
- [bng2latlong](https://www.getthedata.com/bng2latlong) - Simple API to convert an OSGB36 easting and northing (British National Grid) to WGS84 latitude and longitude.
- [Open Postcode Geo API](https://www.getthedata.com/open-postcode-geo-api) - British postcodes with easting, northing, latitude, and longitude.
- [Country State City API](https://countrystatecity.in/) - Full Database of city state country available in JSON, SQL, XML, YAML & CSV format.
- [API Geo](https://api.gouv.fr/les-api/api-geo) - Official French geographical data API.
- [geonames](http://www.geonames.org/export/web-services.html) - Supports placename lookup, postal Code Search, reverse geocoding, nearby populated place and nearby toponym searches.
- [opentopodata API](https://www.opentopodata.org/) - Open Topo Data is a REST API server for your elevation data.
- [what3words](https://developer.what3words.com/public-api) - Convert 3 word addresses to coordinates and vice versa.
- [TomTom](https://developer.tomtom.com/api-explorer-index/documentation/product-information/introduction) - Charging stations, fuel prices, routing, geocoding, parking availability, traffic and waypoint optimization endpoints.
- [geoplugin](https://www.geoplugin.com/webservices) - Free geolocation and currency conversion API.
- [Open Charge Map API](https://openchargemap.org/site/develop/api) - Non-commercial, non-profit global public registry of electric vehicle charging locations.
- [OpenSky API](https://openskynetwork.github.io/opensky-api/rest.html) - Retrieve live airspace information for research and non-commercial purposes.
- [Open-Meteo](https://open-meteo.com/) - Global weather forecast API for non-commercial use.
- [RainViewer](https://www.rainviewer.com/api.html) - Free weather API. Offers the past (2 hours) and forecast (30 minutes) weather radar data and the past infrared satellite data.
- [Sunrise and sunset](https://sunrise-sunset.org/api) - Sunset and sunrise times for a given latitude and longitude.
- [Geocode.xyz](https://geocode.xyz/) - Reverse Geocoding, Forward Geocoding, Geoparsing API. Free requests throttled at 1 request per second.
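Most of these services are plain HTTP endpoints, so no SDK is required. As a minimal sketch using the USGS earthquake endpoint listed above (the `format`, `starttime` and `minmagnitude` query parameters and the GeoJSON field names follow the USGS documentation, so verify them against the docs before relying on this):

```python
# Sketch: fetch recent earthquakes as GeoJSON from the USGS FDSN event service.
import requests

response = requests.get(
    "https://earthquake.usgs.gov/fdsnws/event/1/query",
    params={"format": "geojson", "starttime": "2024-01-01", "minmagnitude": 5},
    timeout=30,
)
response.raise_for_status()

# Each feature is a GeoJSON point; coordinates are [longitude, latitude, depth].
for feature in response.json()["features"][:10]:
    props = feature["properties"]
    lon, lat, _depth = feature["geometry"]["coordinates"]
    print(f"M{props['mag']}  {props['place']}  ({lat:.2f}, {lon:.2f})")
```

The same pattern (one GET request plus a walk over the returned JSON) applies to most of the APIs above; only the base URL and parameter names change.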
### Collections
Compilations and repositories of open geospatial datasets:
- [awesome-public-datasets](https://github.com/awesomedata/awesome-public-datasets) - An awesome repository full of open datasets from an abundance of different categories.
- [Free GIS data](https://freegisdata.rtwilson.com/) - Links to over 500 sites providing freely available geographic datasets.
- [WRI](https://www.wri.org/data) - World resources institute.
- [Public APIs](https://github.com/public-apis-dev/public-apis) - A collective list of free APIs for use in software and web development.
## 📒 Notebooks
Some JavaScript notebooks to help you code:
### Beginner
- [Hello, Leaflet](https://observablehq.com/@observablehq/hello-leaflet) - ObservableHQ.
- [Hello, Bertin.js](https://observablehq.com/@neocartocnrs/hello-bertin-js) - Nicolas Lambert.
- [Hello, Mapbox GL](https://observablehq.com/@observablehq/hello-mapbox-gl) - Mike Bostock.
- [Hello, eurostat-map.js](https://observablehq.com/@joewdavies/eurostat-map-js) - Joe Davies.
- [Hello, gridviz](https://observablehq.com/@neocartocnrs/hello-gridviz) - Nicolas Lambert.
### Intermediate
- [World Tour](https://observablehq.com/@d3/world-tour) - D3.
- [Choropleth](https://observablehq.com/@d3/choropleth) - D3.
- [How to make a nice scalebar](https://observablehq.com/@jgaffuri/nice-scale-bar) - Julien Gaffuri.
- [#GISCHAT Twitter Users with MapBoxGL - Globe Projection](https://observablehq.com/@chriszrc/gischat-twitter-users-with-mapboxgl-globe-projection) - Chris Marx.
- [Hexgrid maps with d3-hexgrid](https://observablehq.com/@larsvers/hexgrid-maps-with-d3-hexgrid) - Larsvers.
- [Bivariate Choropleth with Continuous Color Scales](https://observablehq.com/@stephanietuerk/bivariate-choropleth-with-continuous-color-scales) - Stephanie Tuerk.
- [Visualizing Eurostat grid data using Three.js & D3](https://observablehq.com/@joewdavies/visualizing-eurostat-grid-data-using-three-js-d3) - Joe Davies.
### Advanced
- [Try to impeach this? Challenge accepted!](https://observablehq.com/@karimdouieb/try-to-impeach-this-challenge-accepted) - Karim Douieb.
- [Bars and pubs in Paris](https://observablehq.com/@neocartocnrs/bars-pubs-in-paris) - Nicolas Lambert.
- [Brussels Street Gender Inequality](https://observablehq.com/@karimdouieb/brussels-streets-gender-inequality) - Karim Douieb.
- [Animating voting maps with regl](https://observablehq.com/@bmschmidt/animating-voting-maps-with-regl) - Benjamin Schmidt.
- [Election maps as dorling striped circles](https://observablehq.com/@jgaffuri/election-map-dorling-striped-circles) - Julien Gaffuri.
- [GeoParquet on the web](https://observablehq.com/@kylebarron/geoparquet-on-the-web) - Kyle Barron.
- [Interactive Regl wind demo](https://observablehq.com/@dkaoster/interactive-regl-wind-demo) - Daniel Kao.
- [Dorling cartogram of the Spanish Presidential election](https://observablehq.com/@adrianblanco/dorling-cartogram-of-the-spanish-presidential-election) - Adrián Blanco.
- [Visualizing earthquakes with Three.js](https://observablehq.com/@joewdavies/visualizing-earthquakes-with-three-js) - Joe Davies.
## :world_map: Web maps
A compilation of interesting web maps:
- [Map of notable people](https://tjukanovt.github.io/notable-people) - Topi Tjukanov.
- [Submarine cable map](https://www.submarinecablemap.com/) - TeleGeography.
- [Radio Garden](https://radio.garden/) - 3D Globe Radio Tuner.
- [Map of every building in the United States](https://www.nytimes.com/interactive/2018/10/12/us/map-of-every-building-in-the-united-states.html) - New York Times.
- [Map of the Roman transport network](https://orbis.stanford.edu/) - The Stanford Geospatial Network Model of the Roman World.
- [Webgl Wind](https://github.com/mapbox/webgl-wind) - A WebGL-powered visualization of wind power. Capable of rendering up to 1 million wind particles at 60fps.
## 🌐 Web apps
Plug-and-play geospatial web apps:
- [city roads](https://anvaka.github.io/city-roads/) - Render every single road in any city at once.
- [Kepler](https://kepler.gl/demo) - A powerful open source geospatial analysis tool for large-scale data sets.
- [Plasio](https://github.com/verma/plasio) - Drag-n-drop In-browser LAS/LAZ point cloud viewer.
- [mapshaper](https://mapshaper.org/) - Online editor for map data.
- [geotiff.io](http://app.geotiff.io/) - GeoTIFF.io provides quick access to easy-to-use raster processing.
- [IMAGE](https://gisco-services.ec.europa.eu/image/) - An easy-to-use tool for generating thematic maps.
- [magrit](https://magrit.cnrs.fr/) - Magrit is an online application for thematic mapping (cartography).
- [StoryMap JS](https://storymap.knightlab.com/) - The open-source alternative to ESRI's Story Map application.
- [Datawrapper](https://github.com/datawrapper/datawrapper) - Create charts, maps, and tables.
- [Maputnik](https://github.com/maputnik/editor) - A free and open visual editor for the Mapbox GL styles targeted at developers and map designers.
- [Fantasy Map Generator](https://github.com/Azgaar/Fantasy-Map-Generator) - Free web application that helps fantasy writers, game masters, and cartographers create and edit fantasy maps.
- [uMap](https://github.com/umap-project/umap) - Lets you create a map with OpenStreetMap layers and embed it in your site.
- [Peak Map](https://github.com/anvaka/peak-map) - Allows you to visualize elevation of any area on the map with filled area charts (also known as a ridgeline).
- [mapus](https://github.com/alyssaxuu/mapus) - Mapus is a tool to explore and annotate collaboratively on a map.
- [MapOnShirt](https://maponshirt.com) - A fun tool for creating colorful designs from maps and turning them into T-shirts, Posters, Cushions and more.
## 🎨 Colour advice
Colour usage is very important in data visualisation and cartography. Here are some tools to help you choose the best colours for your maps:
- [ColorBrewer](https://colorbrewer2.org/) - Colour advice for maps, based on the research of Dr. Cynthia Brewer.
- [viz-palette](https://projects.susielu.com/viz-palette) - This project is optimized for tweaking, copying, and pasting colors in and out of JavaScript.
- [Chroma.js Color Palette Helper](https://gka.github.io/palettes/#/9) - This chroma.js-powered tool helps you master multi-hued, multi-stop color scales.
- [Dicopal.js](https://github.com/riatelab/dicopal.js) - Discrete color palettes (hundreds of them!) for JavaScript.
- [Textures.js](https://github.com/riccardoscalco/textures) - JavaScript library for creating SVG patterns. Made on top of d3.js, it is designed for data visualization. Textures are useful for the selective perception of different categories.
## 📍 Icons
Icons to add to your GIS websites:
- [font-GIS](https://github.com/Viglino/font-gis) - A very very cool icon font set for use with GIS and spatial analysis tools.
- [Map Icons Collection](https://mapicons.mapsmarker.com/) - A set of more than 1000 free and customizable icons to use as placemarks for your POI (Points of Interest) locations on your maps.
- [Material Symbols](https://fonts.google.com/icons?icon.query=map) - Over 2,990 glyphs in a single font file with a wide range of design variants.
- [Geoapify map marker playground](https://apidocs.geoapify.com/playground/icon/) - The Marker Icon API lets you create beautiful icons and use them as Map Markers.
## 📺 Videos
Videos of web mapping presentations and tutorials:
- [Mapping Geolocation with Leaflet.js - Working with Data and APIs in JavaScript](https://www.youtube.com/watch?v=nZaZ2dB6pow) - The Coding Train.
- [10 Maps, and the Tech and Stories Behind Them](https://www.youtube.com/watch?v=PpWAKVjPlgU) - Maarten Lambrechts.
- [Intermediate Three.js Tutorial - Create a Globe with Custom Shaders](https://www.youtube.com/watch?v=vM8M4QloVL0&t=4418s) - Chris Courses.
- [Statistical Cartography - Design principles for statistical map design](https://www.youtube.com/watch?v=e803ElX5Q_c) - Julien Gaffuri.
## 📚 Further reading
- [Fundamentals of Data Visualization](https://clauswilke.com/dataviz/) - Claus O. Wilke.
- [A Workbook for Interactive Cartography and Visualization on the Open Web](https://github.com/uwcartlab/webmapping) - Robert Roth, Carl Sack, Gareth Baldrica-Franklin, Yuying Chen, Rich Donohue, Lily Houtman, Tim Prestby, Robin Tolochko, Nick Underwood.
- [Thematic Mapping: 101 Inspiring Ways to Visualise Empirical Data](https://www.esri.com/en-us/esri-press/browse/thematic-mapping) - Kenneth Field.
- [Color use guidelines for mapping and visualization](https://colorbrewer2.org/learnmore/schemes_full.html#qualitative) - Cynthia A. Brewer.
## Contributing
Contributions of any kind are welcome; just follow the [guidelines](./CONTRIBUTING.md) by either:
- Filing a [suggestion issue](https://github.com/joewdavies/awesome-frontend-gis/issues/new/) (easier).
- Opening a [pull request](https://github.com/joewdavies/awesome-frontend-gis/compare).
---
If you have any questions about this list, please don't hesitate to contact me [@joewdavies](https://twitter.com/joewdavies) on Twitter or [open a GitHub issue](https://github.com/joewdavies/awesome-frontend-gis/issues/new).
| {
"repo_name": "joewdavies/awesome-frontend-gis",
"stars": "159",
"repo_language": "None",
"file_name": "lint.yml",
"mime_type": "text/plain"
} |
name: Awesome Lint

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - run: npx awesome-lint
| {
"repo_name": "joewdavies/awesome-frontend-gis",
"stars": "159",
"repo_language": "None",
"file_name": "lint.yml",
"mime_type": "text/plain"
} |
<!DOCTYPE html>
<html>
<head>
<meta
http-equiv="refresh"
content="0;URL='https://cfpb.github.io/design-system/?utm_source=capitalframework&utm_medium=redirect&utm_campaign=designsystemlaunch'"
/>
<meta name="robots" content="noindex">
</head>
<body>
<p>
This page has moved to https://cfpb.github.io/design-system/.
</p>
</body>
</html>
| {
"repo_name": "cfpb/capital-framework",
"stars": "55",
"repo_language": "HTML",
"file_name": "README.md",
"mime_type": "text/plain"
} |
<!DOCTYPE html>
<html>
<head>
<meta
http-equiv="refresh"
content="0;URL='https://cfpb.github.io/design-system/?utm_source=capitalframework&utm_medium=redirect&utm_campaign=designsystemlaunch'"
/>
<meta name="robots" content="noindex">
</head>
<body>
<p>
This page has moved to https://cfpb.github.io/design-system/.
</p>
</body>
</html>
| {
"repo_name": "cfpb/capital-framework",
"stars": "55",
"repo_language": "HTML",
"file_name": "README.md",
"mime_type": "text/plain"
} |
# Capital Framework
# :warning: THIS REPO IS DEPRECATED (July 10, 2020) :warning:
Please migrate to the [design-system](https://github.com/cfpb/design-system).
See https://cfpb.github.io/capital-framework-archive/ for a snapshot of what this site used to look like.
| {
"repo_name": "cfpb/capital-framework",
"stars": "55",
"repo_language": "HTML",
"file_name": "README.md",
"mime_type": "text/plain"
} |
package main
import (
"os"
"testing"
)
func TestGetGoPath(t *testing.T) {
tmpdir := os.TempDir()
wd, err := os.Getwd()
if err != nil {
t.Fatalf("Error getting working directory: %s", err.Error())
}
origGopath := os.Getenv("GOPATH")
// Verify that getGoPath works
_, err = getGoPath(wd)
if err != nil {
t.Errorf("Expected getGoPath success, got %s", err.Error())
}
// Unset GOPATH and verify that getGoPath fails
err = os.Unsetenv("GOPATH")
if err != nil {
t.Fatal(err.Error())
}
_, err = getGoPath(wd)
if err == nil {
t.Errorf("Expected getGoPath failure, got %s", err)
}
// Set gopath to tmp directory and verify that getGoPath fails
err = os.Setenv("GOPATH", tmpdir)
if err != nil {
t.Fatal(err.Error())
}
_, err = getGoPath(wd)
if err == nil {
t.Errorf("Expected getGoPath failure, got %s", err)
}
// Set gopath to GOPATH + tmp directory and verify that getGoPath succeeds
err = os.Setenv("GOPATH", origGopath+string(os.PathListSeparator)+tmpdir)
if err != nil {
t.Fatal(err.Error())
}
_, err = getGoPath(wd)
if err != nil {
t.Errorf("Expected getGoPath failure, got %s", err.Error())
}
}
| {
"repo_name": "sparrc/gdm",
"stars": "73",
"repo_language": "Go",
"file_name": "TestGodeps",
"mime_type": "text/plain"
} |
golang.org/x/tools 3b1faeda9afbcba128c2d794b38ffe7982141139
| {
"repo_name": "sparrc/gdm",
"stars": "73",
"repo_language": "Go",
"file_name": "TestGodeps",
"mime_type": "text/plain"
} |
package main
import (
"bufio"
"flag"
"fmt"
"os"
"path/filepath"
"strings"
"sync"
"time"
)
var (
// Version can be auto-set at build time using an ldflag
// go build -ldflags "-X main.Version `git describe --tags --always`"
Version string
// DepsFile specifies the Godeps file used by gdm
DepsFile string = "Godeps"
// Parallel specifies whether to 'restore' in parallel
// This is primarily for debug/logging purposes
Parallel bool = true
)
const usage = `Go Dependency Manager (gdm), a lightweight tool for managing Go dependencies.
Usage:
gdm <command> [-f GODEPS_FILE] [-v]
The commands are:
vendor Check out revisions defined in Godeps file in ./vendor directory.
restore Check out revisions defined in Godeps file in $GOPATH.
save Saves currently checked-out dependencies from $GOPATH to Godeps file.
brew Outputs homebrew go_resource entries to stdout.
version Prints the version.
`
func main() {
flag.Usage = usageExit
flag.Parse()
args := flag.Args()
var verbose bool
if len(args) < 1 {
usageExit()
} else if len(args) > 1 {
set := flag.NewFlagSet("", flag.ExitOnError)
set.StringVar(&DepsFile, "f", "Godeps", "Specify the name/location of Godeps file")
set.BoolVar(&verbose, "v", false, "Verbose mode")
set.BoolVar(&Parallel, "parallel", true, "Execute gdm restore in parallel")
set.Parse(os.Args[2:])
}
wd, err := os.Getwd()
if err != nil {
panic(err)
}
gopath, err := getGoPath(wd)
if err != nil {
fmt.Fprintln(os.Stderr, err.Error())
os.Exit(1)
}
switch args[0] {
case "save", "bootstrap":
splash(wd, "", gopath)
save(wd, gopath, verbose)
case "vendor":
path := filepath.Join(wd, "vendor")
splash(wd, path, gopath)
restore(wd, path, verbose)
case "restore", "get", "sync", "checkout":
path := filepath.Join(gopath, "src")
splash(wd, path, gopath)
restore(wd, path, verbose)
case "brew", "homebrew":
homebrew(wd, gopath, verbose)
case "version":
fmt.Printf("gdm - version %s\n", Version)
default:
usageExit()
}
}
func splash(wd, path, gopath string) {
fmt.Println("======= Go Dependency Manager =======")
fmt.Println("= working dir: ", wd)
fmt.Println("= checkout dir:", path)
fmt.Println("= GOPATH: ", gopath)
fmt.Println("=====================================")
}
func usageExit() {
fmt.Println(usage)
os.Exit(0)
}
// getGoPath returns a single GOPATH. If there are multiple defined in the users
// $GOPATH env variable, then getGoPath validates that the working directory is
// part of one of the GOPATHs, and uses the first one it finds that does.
func getGoPath(wd string) (string, error) {
gopath := os.Getenv("GOPATH")
if gopath == "" {
gopath = filepath.Join(os.Getenv("HOME"), "go")
fmt.Printf("GOPATH not found, assuming %s\n", gopath)
}
// Split out multiple GOPATHs if necessary
if strings.Contains(gopath, string(os.PathListSeparator)) {
paths := strings.Split(gopath, string(os.PathListSeparator))
for _, path := range paths {
if strings.Contains(wd, path) {
gopath = path
break
}
}
}
if !strings.Contains(wd, gopath) {
return "", fmt.Errorf("gdm can only be executed within a directory in"+
" the GOPATH, wd: %s, gopath: %s", wd, gopath)
}
return gopath, nil
}
func homebrew(wd, gopath string, verbose bool) {
imports := ImportsFromFile(filepath.Join(wd, DepsFile))
fmt.Println()
for _, i := range imports {
fmt.Printf(" go_resource \"%s\" do\n", i.ImportPath)
fmt.Printf(" url \"%s.%s\",\n", i.Repo.Repo, i.Repo.VCS.Cmd)
fmt.Printf(" :revision => \"%s\"\n", i.Rev)
fmt.Printf(" end\n")
fmt.Println()
}
}
func save(wd, gopath string, verbose bool) {
imports, err := ImportsFromPath(wd, gopath, verbose)
if err != nil {
fmt.Printf("Fatal error: %s", err)
os.Exit(1)
}
f, err := os.Create(filepath.Join(wd, DepsFile))
if err != nil {
fmt.Printf("Fatal error: %s", err)
os.Exit(1)
}
defer f.Close()
w := bufio.NewWriter(f)
for _, i := range imports {
fmt.Printf("> Saving Import [%s] Revision [%s]\n", i.ImportPath, i.Rev)
_, err = w.WriteString(fmt.Sprintf("%s %s\n", i.ImportPath, i.Rev))
if err != nil {
fmt.Printf("Fatal error: %s", err)
os.Exit(1)
}
}
w.Flush()
}
func restore(wd, path string, verbose bool) {
imports := ImportsFromFile(filepath.Join(wd, DepsFile))
if Parallel {
restoreParallel(imports, path, verbose)
} else {
restoreSerial(imports, path, verbose)
}
}
func restoreParallel(imports []*Import, path string, verbose bool) {
var wg sync.WaitGroup
wg.Add(len(imports))
errC := make(chan error, len(imports))
for _, i := range imports {
i.Verbose = verbose
go func(I *Import) {
defer wg.Done()
err := I.RestoreImport(path)
if err != nil {
errC <- err
}
}(i)
// arbitrary sleep to avoid overloading a single clone endpoint
time.Sleep(time.Millisecond * 30)
}
wg.Wait()
close(errC)
if len(errC) > 0 {
fmt.Println()
fmt.Println("ERROR restoring some imports:")
for err := range errC {
fmt.Printf("- %s", err)
}
os.Exit(1)
}
}
func restoreSerial(imports []*Import, path string, verbose bool) {
for _, i := range imports {
i.Verbose = verbose
i.RestoreImport(path)
}
}
| {
"repo_name": "sparrc/gdm",
"stars": "73",
"repo_language": "Go",
"file_name": "TestGodeps",
"mime_type": "text/plain"
} |
package main
import (
"bufio"
"bytes"
"fmt"
"go/build"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"sort"
"strings"
"golang.org/x/tools/go/vcs"
)
// Import defines an import dependency
type Import struct {
// ie, golang.org/x/tools/go/vcs
ImportPath string
// ie, 759e96ebaffb01c3cba0e8b129ef29f56507b323
Rev string
// Controls verbosity of output
Verbose bool
// see https://godoc.org/golang.org/x/tools/go/vcs#RepoRoot
Repo *vcs.RepoRoot
}
// RestoreImport takes the import and restores it at the given GOPATH.
// There are four steps to this:
// 1. cd $CHECKOUT_PATH/<import_path>
// 2. Checkout default branch (ie, git checkout master)
// 3. Download changes (ie, git pull --ff-only)
// 4. Checkout revision (ie, git checkout 759e96ebaffb01c3cba0e8b129ef29f56507b323)
func (i *Import) RestoreImport(path string) error {
vcs.ShowCmd = i.Verbose
fullpath := filepath.Join(path, i.ImportPath)
fmt.Printf("> Restoring %s to %s\n", fullpath, i.Rev)
// If the repo doesn't exist already, create it
_, err := os.Stat(fullpath)
if err != nil && os.IsNotExist(err) {
if i.Verbose {
fmt.Printf("> Repo %s not found, creating at rev %s\n", fullpath, i.Rev)
}
// Create parent directory
rootpath := filepath.Join(path, i.Repo.Root)
if err = os.MkdirAll(rootpath, os.ModePerm); err != nil {
return fmt.Errorf("Could not create parent directory %s for repo %s\n",
rootpath, fullpath)
}
// Clone repo
if err = i.Repo.VCS.Create(rootpath, i.Repo.Repo); err != nil {
return fmt.Errorf("Error cloning repo at %s, %s\n",
fullpath, err.Error())
}
}
// Attempt to checkout revision.
cmdString := i.Repo.VCS.TagSyncCmd
cmdString = strings.Replace(cmdString, "{tag}", i.Rev, 1)
if _, err = runInDir(i.Repo.VCS.Cmd, strings.Fields(cmdString), fullpath, i.Verbose); err == nil {
return nil
}
// Revision not found, checkout default branch (usually master).
_, err = runInDir(i.Repo.VCS.Cmd, strings.Fields(i.Repo.VCS.TagSyncDefault),
fullpath, i.Verbose)
if err != nil {
return fmt.Errorf("Error checking out default branch (usually master) in repo %s, %s\n",
fullpath, err.Error())
}
// Download changes from remote repo.
err = i.Repo.VCS.Download(fullpath)
if err != nil {
return fmt.Errorf("Error downloading changes to %s, %s\n",
fullpath, err.Error())
}
// Attempt to checkout rev again after downloading changes.
if _, err = runInDir(i.Repo.VCS.Cmd, strings.Fields(cmdString), fullpath, i.Verbose); err != nil {
return fmt.Errorf("Error checking out rev %s of repo %s, %s\n",
i.Rev, fullpath, err.Error())
}
return nil
}
// ImportsFromFile reads the given file and returns Import structs.
func ImportsFromFile(filename string) []*Import {
content, err := ioutil.ReadFile(filename)
if err != nil {
panic(err)
}
lines := strings.Split(string(content), "\n")
imports := []*Import{}
roots := make(map[string]bool)
for _, line := range lines {
line = strings.TrimSpace(line)
if strings.HasPrefix(line, "#") || len(line) == 0 {
// Skip commented line
continue
} else if strings.Contains(line, "#") {
// in-line comment
line = strings.TrimSpace(strings.Split(line, "#")[0])
}
parts := strings.Fields(line)
if len(parts) != 2 {
fmt.Fprintf(os.Stderr, "Invalid line: %s\n", line)
os.Exit(1)
}
path := parts[0]
rev := parts[1]
root, err := getRepoRoot(path)
if err != nil {
fmt.Fprintf(os.Stderr, "Error getting VCS info for %s\n", path)
os.Exit(1)
}
if _, ok := roots[root.Root]; !ok {
roots[root.Root] = true
imports = append(imports, &Import{
Rev: rev,
ImportPath: path,
Repo: root,
})
}
}
return imports
}
// ImportsFromPath looks in the given working directory and finds all 3rd-party
// imports, and returns Import structs
func ImportsFromPath(wd, gopath string, verbose bool) ([]*Import, error) {
// Get a set of transitive dependencies (package import paths) for the
// specified package.
depsOutput, err := runInDir("go",
[]string{"list", "-f", `{{join .Deps "\n"}}`, "./..."},
wd, verbose)
if err != nil {
return nil, err
}
// filter out standard library
deps := filterPackages(depsOutput, nil)
// List dependencies of test files, which are not included in the go list .Deps
// Also, ignore any dependencies that are already covered.
testDepsOutput, err := runInDir("go",
[]string{"list", "-f",
`{{join .TestImports "\n"}}{{"\n"}}{{join .XTestImports "\n"}}`, "./..."},
wd, verbose)
if err != nil {
return nil, err
}
// filter out stdlib and existing deps
testDeps := filterPackages(testDepsOutput, deps)
for dep := range testDeps {
deps[dep] = true
}
// Sort the import set into a list of string paths
sortedImportPaths := []string{}
for path := range deps {
// Do not vendor the repo that we are vendoring
proot, err := getRepoRoot(path)
if err != nil {
return nil, err
}
// If the root of the package in question is the working
// directory then we don't want to vendor it.
if strings.HasSuffix(wd, proot.Root) {
continue
}
sortedImportPaths = append(sortedImportPaths, path)
}
sort.Strings(sortedImportPaths)
// Iterate through imports, creating a list of Import structs
result := []*Import{}
for _, importpath := range sortedImportPaths {
root, err := getRepoRoot(importpath)
if err != nil {
fmt.Fprintf(os.Stderr, "Error getting VCS info for %s, skipping\n", importpath)
continue
}
_, ok := deps[root.Root]
if root.Root == importpath || !ok {
// Use the repo root as importpath if it's a usable go VCS repo
if _, err := getRepoRoot(root.Root); err == nil {
deps[root.Root] = true
importpath = root.Root
}
// If this is the repo root, or root is not already imported
fullpath := filepath.Join(gopath, "src", importpath)
rev := getRevisionFromPath(fullpath, root)
result = append(result, &Import{
Rev: rev,
ImportPath: importpath,
Repo: root,
})
}
}
return result, nil
}
// getImportPath takes a path like /home/csparr/go/src/github.com/sparrc/gdm
// and returns the import path, ie, github.com/sparrc/gdm
func getImportPath(fullpath string) string {
p, err := build.ImportDir(fullpath, 0)
if err != nil {
fmt.Println(err)
return ""
}
return p.ImportPath
}
// getRepoRoot takes an import path like github.com/sparrc/gdm
// and returns the VCS Repository information for it.
func getRepoRoot(importpath string) (*vcs.RepoRoot, error) {
repo, err := vcs.RepoRootForImportPath(importpath, false)
if err != nil {
return nil, err
}
return repo, nil
}
// getRevisionFromPath takes a path like /home/csparr/go/src/github.com/sparrc/gdm
// and the VCS Repository information and returns the currently checked out
// revision, ie, 759e96ebaffb01c3cba0e8b129ef29f56507b323
func getRevisionFromPath(fullpath string, root *vcs.RepoRoot) string {
// Check that we have the executable available
_, err := exec.LookPath(root.VCS.Cmd)
if err != nil {
fmt.Fprintf(os.Stderr, "gdm missing %s command.\n", root.VCS.Name)
os.Exit(1)
}
// Determine command to get the current hash
var cmd *exec.Cmd
switch root.VCS.Cmd {
case "git":
cmd = exec.Command("git", "rev-parse", "HEAD")
case "hg":
cmd = exec.Command("hg", "id", "-i")
case "bzr":
cmd = exec.Command("bzr", "revno")
default:
fmt.Fprintf(os.Stderr, "gdm does not support %s\n", root.VCS.Cmd)
os.Exit(1)
}
cmd.Dir = fullpath
output, err := cmd.Output()
if err != nil {
fmt.Fprintf(os.Stderr, "Error getting revision hash at %s, %s\n",
fullpath, err.Error())
os.Exit(1)
}
return strings.TrimSpace(string(output))
}
// filterPackages accepts the output of a go list comment (one package per line)
// and returns a set of package import paths, excluding standard library.
// Additionally, any packages present in the "exclude" set will be excluded.
func filterPackages(output []byte, exclude map[string]bool) map[string]bool {
var scanner = bufio.NewScanner(bytes.NewReader(output))
var deps = map[string]bool{}
for scanner.Scan() {
var (
pkg = scanner.Text()
slash = strings.Index(pkg, "/")
stdLib = slash == -1 || strings.Index(pkg[:slash], ".") == -1
)
if stdLib {
continue
}
if _, ok := exclude[pkg]; ok {
continue
}
deps[pkg] = true
}
return deps
}
// runInDir runs the given command (name) with args, in the given directory.
// if verbose, prints out the command and dir it is executing.
// On failure, the error is printed to stderr and returned to the caller.
// Returns output of the command.
func runInDir(name string, args []string, dir string, verbose bool) ([]byte, error) {
cmd := exec.Command(name, args...)
cmd.Dir = dir
if verbose {
fmt.Printf("cd %s\n%s %s\n", dir, name, strings.Join(args, " "))
}
output, err := cmd.Output()
if err != nil {
fmt.Errorf("Error running %s %s in dir %s, %s\n",
name, strings.Join(args, " "), dir, err.Error())
return output, err
}
return output, nil
}
| {
"repo_name": "sparrc/gdm",
"stars": "73",
"repo_language": "Go",
"file_name": "TestGodeps",
"mime_type": "text/plain"
} |
package main
import (
"os"
"path/filepath"
"testing"
)
func TestSetRootImport(t *testing.T) {
wd, err := os.Getwd()
if err != nil {
t.Errorf("Unexpected error: %s", err.Error())
}
s := "github.com/sparrc/gdm"
rootImport := getImportPath(wd)
if rootImport != s {
t.Errorf("Expected rootImport %s, got %s", s, rootImport)
}
}
func TestGetRepoRoot(t *testing.T) {
s := "github.com/sparrc/gdm"
_, err := getRepoRoot(s)
if err != nil {
t.Errorf("Unexpected error: %s", err.Error())
}
}
func TestImportsFromFile(t *testing.T) {
wd, err := os.Getwd()
if err != nil {
t.Errorf("Unexpected error: %s", err.Error())
}
filename := filepath.Join(wd, "test", "TestGodeps")
imports := ImportsFromFile(filename)
if len(imports) != 19 {
t.Errorf("Expected %d imports, got %d", 20, len(imports))
}
tests := []struct {
importpath string
rev string
}{
{"collectd.org/api", "9fc824c70f713ea0f058a07b49a4c563ef2a3b98"},
// This import is in file but has the same "repo root" as collectd.org/api
// so it shouldn't show up in the 'restore' import paths
// {"collectd.org/network", "9fc824c70f713ea0f058a07b49a4c563ef2a3b98"},
{"github.com/BurntSushi/toml", "056c9bc7be7190eaa7715723883caffa5f8fa3e4"},
{"github.com/bmizerany/pat", "b8a35001b773c267eb260a691f4e5499a3531600"},
{"github.com/boltdb/bolt", "b34b35ea8d06bb9ae69d9a349119252e4c1d8ee0"},
{"github.com/davecgh/go-spew", "5215b55f46b2b919f50a1df0eaa5886afe4e3b3d"},
{"github.com/dgryski/go-bits", "86c69b3c986f9d40065df5bd8f765796549eef2e"},
{"github.com/dgryski/go-bitstream", "27cd5973303fde7d914860be1ea4b927a6be0c92"},
{"github.com/gogo/protobuf", "e492fd34b12d0230755c45aa5fb1e1eea6a84aa9"},
{"github.com/golang/snappy", "723cc1e459b8eea2dea4583200fd60757d40097a"},
{"github.com/hashicorp/raft", "d136cd15dfb7876fd7c89cad1995bc4f19ceb294"},
{"github.com/hashicorp/raft-boltdb", "d1e82c1ec3f15ee991f7cc7ffd5b67ff6f5bbaee"},
{"github.com/influxdb/enterprise-client", "25665cba4f54fa822546c611c9414ac31aa10faa"},
{"github.com/jwilder/encoding", "07d88d4f35eec497617bee0c7bfe651a796dae13"},
{"github.com/kimor79/gollectd", "61d0deeb4ffcc167b2a1baa8efd72365692811bc"},
{"github.com/paulbellamy/ratecounter", "5a11f585a31379765c190c033b6ad39956584447"},
{"github.com/peterh/liner", "4d47685ab2fd2dbb46c66b831344d558bc4be5b9"},
{"github.com/rakyll/statik", "274df120e9065bdd08eb1120e0375e3dc1ae8465"},
{"golang.org/x/crypto", "7b85b097bf7527677d54d3220065e966a0e3b613"},
{"gopkg.in/fatih/pool.v2", "cba550ebf9bce999a02e963296d4bc7a486cb715"},
}
for i, test := range tests {
i := imports[i]
if i.ImportPath != test.importpath {
t.Errorf("Expected %s, actual %s", test.importpath, i.ImportPath)
}
if i.Rev != test.rev {
t.Errorf("Expected %s, actual %s", test.rev, i.Rev)
}
}
}
| {
"repo_name": "sparrc/gdm",
"stars": "73",
"repo_language": "Go",
"file_name": "TestGodeps",
"mime_type": "text/plain"
} |
# Go Dependency Manager (gdm) [](https://circleci.com/gh/sparrc/gdm)
gdm aims to do as little as possible. It will checkout dependencies to the
local vendor directory and does not require that people use `gdm` to build
your project. In this way, people can still simply `go get` your project
and build.
We would recommend that you add `vendor` to your .gitignore file when using gdm.
This tool assumes you are working in a standard Go workspace, as described in
http://golang.org/doc/code.html.
### Install
```
go get github.com/sparrc/gdm
```
### How to use gdm with a new project
Assuming your Go workspace is setup, so you can build your project
with `go install` or `go install ./...`, it's one command to start using:
```
gdm save
```
This will create a new file in your repo directory called `Godeps`, which
specifies project dependencies and their revisions. This file is identical to
the file used by [gpm](https://github.com/pote/gpm).
Godeps is a simple text file of repo roots and revisions:
```
collectd.org/api 9fc824c70f713ea0f058a07b49a4c563ef2a3b98
collectd.org/network 9fc824c70f713ea0f058a07b49a4c563ef2a3b98
github.com/BurntSushi/toml 056c9bc7be7190eaa7715723883caffa5f8fa3e4
```
The file supports comments using the `#` character.
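For instance, a hypothetical Godeps file mixing full-line and trailing comments (illustrative paths and revisions only) is read with everything after `#` ignored:

```
# pinned third-party dependencies
collectd.org/api 9fc824c70f713ea0f058a07b49a4c563ef2a3b98
github.com/BurntSushi/toml 056c9bc7be7190eaa7715723883caffa5f8fa3e4 # TOML config parsing
```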
## Vendor
The `gdm vendor` command is the opposite of `gdm save`. It will checkout the
package versions specified in Godeps to the vendor directory.
### Add a Dependency
To add a new package github.com/foo/bar, do this:
1. Run `go get github.com/foo/bar`
1. Run `gdm save`
### Update a Dependency
To update a package to the latest version, do this:
1. Run `rm -rf ./vendor`
1. Run `go get -u github.com/foo/bar`
1. Run `gdm save`
Before committing the change, you'll probably want to inspect the changes to
Godeps, for example with `git diff`, and make sure it looks reasonable.
### Update all dependencies
To update all dependencies from your `$GOPATH`, do this:
1. Run `rm -rf ./vendor`
1. Run `go get -u ./...`
1. Run `gdm save`
### Building a gdm project
Building a project managed by gdm looks like this:
1. Run `go get github.com/foo/bar`
1. Run `cd $GOPATH/src/github.com/foo/bar`
1. Run `gdm vendor`
1. Build: `go install ./...`
## Homebrew
To help make a [homebrew](https://github.com/Homebrew/homebrew)
formula for your Go project, gdm supports a `gdm brew` command, which will print
out your dependencies to stdout in the homebrew go_resource format, like this:
```console
$ gdm brew
go_resource "collectd.org/api" do
url "https://github.com/collectd/go-collectd.git",
:revision => "9fc824c70f713ea0f058a07b49a4c563ef2a3b98"
end
go_resource "collectd.org/network" do
url "https://github.com/collectd/go-collectd.git",
:revision => "9fc824c70f713ea0f058a07b49a4c563ef2a3b98"
end
go_resource "github.com/BurntSushi/toml" do
url "https://github.com/BurntSushi/toml.git",
:revision => "056c9bc7be7190eaa7715723883caffa5f8fa3e4"
end
...
```
### Restore
The `gdm restore` command works similar to the `gdm vendor` command, but instead
of checking out the dependencies in the ./vendor directory, it will checkout the
dependencies in your current GOPATH. This will modify repos in your GOPATH.
This can be useful for debugging or if you are using a Go version earlier than
1.9.
#### Acknowledgements
If you're familiar with Go dependency management, you can probably see the
similarities with [gpm](https://github.com/pote/gpm) and
[godep](https://github.com/tools/godep). This tool could not have existed without
their influence!
| {
"repo_name": "sparrc/gdm",
"stars": "73",
"repo_language": "Go",
"file_name": "TestGodeps",
"mime_type": "text/plain"
} |
collectd.org/api 9fc824c70f713ea0f058a07b49a4c563ef2a3b98
collectd.org/network 9fc824c70f713ea0f058a07b49a4c563ef2a3b98
github.com/BurntSushi/toml 056c9bc7be7190eaa7715723883caffa5f8fa3e4
github.com/bmizerany/pat b8a35001b773c267eb260a691f4e5499a3531600
github.com/boltdb/bolt b34b35ea8d06bb9ae69d9a349119252e4c1d8ee0
github.com/davecgh/go-spew 5215b55f46b2b919f50a1df0eaa5886afe4e3b3d
github.com/dgryski/go-bits 86c69b3c986f9d40065df5bd8f765796549eef2e
github.com/dgryski/go-bitstream 27cd5973303fde7d914860be1ea4b927a6be0c92
github.com/gogo/protobuf e492fd34b12d0230755c45aa5fb1e1eea6a84aa9
github.com/golang/snappy 723cc1e459b8eea2dea4583200fd60757d40097a
github.com/hashicorp/raft d136cd15dfb7876fd7c89cad1995bc4f19ceb294
github.com/hashicorp/raft-boltdb d1e82c1ec3f15ee991f7cc7ffd5b67ff6f5bbaee
github.com/influxdb/enterprise-client 25665cba4f54fa822546c611c9414ac31aa10faa
github.com/jwilder/encoding 07d88d4f35eec497617bee0c7bfe651a796dae13
github.com/kimor79/gollectd 61d0deeb4ffcc167b2a1baa8efd72365692811bc
github.com/paulbellamy/ratecounter 5a11f585a31379765c190c033b6ad39956584447
github.com/peterh/liner 4d47685ab2fd2dbb46c66b831344d558bc4be5b9
github.com/rakyll/statik 274df120e9065bdd08eb1120e0375e3dc1ae8465
golang.org/x/crypto 7b85b097bf7527677d54d3220065e966a0e3b613
gopkg.in/fatih/pool.v2 cba550ebf9bce999a02e963296d4bc7a486cb715
| {
"repo_name": "sparrc/gdm",
"stars": "73",
"repo_language": "Go",
"file_name": "TestGodeps",
"mime_type": "text/plain"
} |
#!/bin/bash
# For local dev:
# export FLASK_DEBUG=1
# export FLASK_APP=server.py
# flask run
# To restart server, kill -HUP <pid>
mkdir log 2> /dev/null
DEBUG=0 authbind gunicorn -b 0.0.0.0:80 server:app --access-logfile log/access.log --error-logfile log/general.log
| {
"repo_name": "Smerity/search_iclr_2018",
"stars": "78",
"repo_language": "HTML",
"file_name": "README.md",
"mime_type": "text/plain"
} |
import flask
app = flask.Flask(__name__)

import glob
import json

data = json.load(open('notes.json'))
keys = {}
for entry in data['notes']:
    keys[entry['id']] = entry

papers = []
for paper in glob.glob('text/papers/*'):
    papers.append([paper.split('id=')[1], open(paper, encoding='utf-8', errors='ignore').read()])
papers = sorted(papers)
print('Loaded {} papers'.format(len(papers)))

from reviewer import ratings, sorted_papers


@app.route("/")
def index():
    query = 'Top 100 papers'
    found = []
    for paperid, paper in papers:
        if paperid in sorted_papers[-100:]:
            if paperid in keys:
                rank = sorted_papers.index(paperid)
                d = dict(id=paperid, title=keys[paperid]['content']['title'], data=keys[paperid], rating=ratings[paperid], rank=rank, pct=int(100 * rank / len(sorted_papers)))
                found.append(d)
    found = sorted(found, key=lambda x: x['rank'], reverse=True)
    return flask.render_template('base.html', query=query, results=found, total_papers=len(sorted_papers))


@app.route("/search/")
def search(query=None):
    query = query if query else flask.request.args.get('query')
    found = []
    for paperid, paper in papers:
        if query.lower() in paper.lower():
            if paperid in keys:
                rank = sorted_papers.index(paperid)
                d = dict(id=paperid, title=keys[paperid]['content']['title'], data=keys[paperid], rating=ratings[paperid], rank=rank, pct=int(100 * rank / len(sorted_papers)))
                found.append(d)
    found = sorted(found, key=lambda x: x['rank'], reverse=True)
    return flask.render_template('base.html', query=query, results=found, total_papers=len(sorted_papers))
| {
"repo_name": "Smerity/search_iclr_2018",
"stars": "78",
"repo_language": "HTML",
"file_name": "README.md",
"mime_type": "text/plain"
} |
import glob
import json
from collections import defaultdict

import numpy as np


def extract_ratings(data):
    ratings = []
    for x in data['notes']:
        if 'content' not in x: continue
        if 'rating' not in x['content']: continue
        rate = int(x['content']['rating'].split(':')[0])
        conf = int(x['content']['confidence'].split(':')[0])
        ratings.append((rate, conf))
    return sorted(ratings, reverse=True)


comments = {}
for fn in glob.glob('reviews/*'):
    try:
        key = fn.split('/')[1]
        comments[key] = json.loads(open(fn, encoding='utf-8', errors='ignore').read())
    except json.JSONDecodeError:
        pass
print('Total reviews:', len(comments))

ratings = defaultdict(list)
for key in comments.keys():
    ratings[key] = extract_ratings(comments[key])

sorted_ratings = [(k, [v[0] for v in vals]) for k, vals in ratings.items()]
sorted_ratings = sorted(sorted_ratings, key=lambda v: np.mean(v[1]) if v[1] else 0)
sorted_papers = [x[0] for x in sorted_ratings]
| {
"repo_name": "Smerity/search_iclr_2018",
"stars": "78",
"repo_language": "HTML",
"file_name": "README.md",
"mime_type": "text/plain"
} |
sample = '''<li class="note" data-id="SyyGPP0TZ"><h4>
<a href="https://openreview.net/forum?id=SyyGPP0TZ&noteId=SyyGPP0TZ">Regularizing and Optimizing LSTM Language Models</a>
<a href="https://openreview.net/pdf?id=SyyGPP0TZ" class="pdf-link" title="Download PDF" target="_blank"><img src="./ICLR 2018 Conference _ OpenReview_files/pdf_icon_blue.svg"></a>'''

import re

txt = open('iclr.htm').read()
pdf_re = re.compile(r'<a href="([^"]+)" class="pdf-link"')
name_pdf_re = re.compile(r'<a href="([^"]+)" class="pdf-link"')
links = pdf_re.findall(txt)
for i, link in enumerate(links):
    print('echo Paper {} of {}: {}'.format(i, len(links), link.split('?id=')[1]))
    # -O (capital) writes the downloaded document to the file; -o would only write wget's log there
    print('wget {} -O {}.pdf'.format(link, link.split('?id=')[1]))
    print('sleep 1')
| {
"repo_name": "Smerity/search_iclr_2018",
"stars": "78",
"repo_language": "HTML",
"file_name": "README.md",
"mime_type": "text/plain"
} |
# Search ICLR 2018
Searching papers submitted to ICLR 2018 can be painful.
You might want to know which paper uses technique X, dataset D, or cites author ME.
Unfortunately, search is limited to titles, abstracts, and keywords, missing the actual contents of the paper.
This Frankensteinian search has been made to help scour the papers by ripping out their souls using `pdftotext`.
This code is evil.
Truly mind-bogglingly evil.
If you read the 31 lines of Python code in reverse you'll likely summon division by zero demons that will slowly consume your sanity whilst NaN bugs work their way into all your deep learning code.
Having said that, to stare into the abyss of evil and run this for yourself:
```
pip install flask gunicorn
sudo apt-get install authbind
mkdir log
./serve.sh
```
By default it uses `authbind` to bind the privileged port 80 without root.
If you're running this locally you probably don't care either way.
Good luck, and may your reviewers have no commentary but praise.
| {
"repo_name": "Smerity/search_iclr_2018",
"stars": "78",
"repo_language": "HTML",
"file_name": "README.md",
"mime_type": "text/plain"
} |
#! /usr/bin/env python3
"""
HackerRank testcases downloader
"""
import argparse
import zipfile
import os
import requests
import re

parser = argparse.ArgumentParser(description='Download testcases (practice challenges only).')
parser.add_argument('name', help="Challenge name")
parser.add_argument('url', nargs='*', help="test case url")
parser.add_argument('-o', '--overwrite', help="Overwrite", action="store_true")
args = parser.parse_args()

# accept either a full challenge URL or a bare challenge slug
m = re.search(r'\.hackerrank\.com/challenges/([\w\d\-]+)', args.name)
if m:
    name = m.group(1)
elif re.fullmatch(r"[\da-z\-]+", args.name):
    name = args.name
else:
    parser.error("cannot extract the challenge name from: " + args.name)

zip = os.path.join(os.path.dirname(__file__), "testcases2", "master", name + "-testcases.zip")
if not os.path.exists(zip) or args.overwrite:
    print("create", zip)
    z = zipfile.ZipFile(zip, mode="w", compression=zipfile.ZIP_DEFLATED)
else:
    print("open", zip)
    z = zipfile.ZipFile(zip, mode="a", compression=zipfile.ZIP_DEFLATED)


def add_testcase(url):
    m = re.search(r'(in|out)put\d\d\.txt', url)
    if m:
        arcname = m.group(0)
        if arcname.startswith("input"):
            arcname = "input/" + arcname
        elif arcname.startswith("output"):
            arcname = "output/" + arcname
        data = requests.get(url)
        print("Add {} size {} into archive".format(arcname, len(data.content)))
        z.writestr(arcname, data.content)
    else:
        m = re.search(r'([io])(\d+)', url)
        if m:
            if m.group(1) == 'i':
                arcname = "input/input"
            else:
                arcname = "output/output"
            arcname += "{:02d}.txt".format(int(m.group(2)))
            print("Enter data for {}, terminate with an empty line".format(arcname))
            data = ""
            while True:
                s = input().strip()
                if s == "":
                    break
                data += s + "\n"
            print("Add {} size {} into archive".format(arcname, len(data)))
            z.writestr(arcname, data)


if len(args.url) == 0:
    while True:
        url = input("url> ").strip()
        if url == "":
            break
        add_testcase(url)
else:
    for url in args.url:
        add_testcase(url)

print()
z.printdir()
# for e in z.infolist():
#     print("{} {:10} {:10} {}".format(e.date_time, e.compress_size, e.file_size, e.filename))
z.close()
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
#! /usr/bin/env python3
import collections
import sqlite3
import glob
import json
import os
import sys
import logging
my_fields = ['id', 'slug', 'name',
'track_id', 'track_slug', 'track_name',
'contest_slug',
'primary_contest_refid',
# 'custom_case', 'is_custom', 'custom',
'difficulty_name',
'custom_checker_language', 'checker_program',
'default_language', 'status'
]
def reorder_fields(f):
if f in my_fields:
return my_fields.index(f)
return len(my_fields) + 1
class hr_db:
def __init__(self):
self.conn = sqlite3.connect('hackerrank.db')
self.tables = {}
self.inserts = {}
self.ids = {}
def load_models(self):
models = []
# load in memory models from offline tree
logging.debug("scanning files...")
for i in glob.iglob(os.path.join(os.path.dirname(__file__),
"offline", "models", "**", "*.json"),
recursive=True):
with open(i, "rb") as f:
data = json.load(f)
if data['status'] is True:
m = data['model']
models.append(m)
logging.debug("%d models found", len(models))
# analyze models
for m in models:
self.analyse(m, "challenge")
# create the database tables
c = self.conn.cursor()
for name, fields in self.tables.items():
sql = "drop table if exists `{}`".format(name)
c.execute(sql)
sql = "create table if not exists `{}` (\n".format(name)
sql += ",\n".join("`{}` {}".format(k, fields[k])
for k in sorted(fields.keys(), key=reorder_fields))
sql += ")"
c.execute(sql)
sql = "insert into `{}` (\n".format(name)
sql += ",".join("`{}`".format(k) for k in fields.keys())
sql += ") values ("
sql += (",?" * len(fields))[1:]
sql += ")"
self.inserts[name] = sql
self.ids[name] = set()
logging.info("table: %s %d", name, len(fields))
c.close()
self.conn.commit()
# store models into the database
logging.debug("load models")
c = self.conn.cursor()
for m in models:
self.charge(c, m, "challenge")
c.close()
self.conn.commit()
def charge(self, c, values, tablename):
if values['id'] in self.ids[tablename]:
return
columns = []
for k in self.tables[tablename].keys():
if k.endswith("_refid"):
k = k[:-6]
v = values.get(k)
if isinstance(v, dict):
if "id" in v:
self.charge(c, v, k)
v = v["id"]
else:
v = json.dumps(v)
elif isinstance(v, list):
if len(v) > 0 and not isinstance(v[0], str):
v = json.dumps(v)
else:
try:
v = "|".join(v)
except TypeError:
print(k)
raise
columns.append(v)
c.execute(self.inserts[tablename], columns)
self.ids[tablename].add(values['id'])
def analyse(self, values, tablename):
if tablename not in self.tables:
logging.debug("discover table %s", tablename)
fields = self.tables[tablename] = collections.OrderedDict()
else:
fields = self.tables[tablename]
for k, v in values.items():
if k.endswith('_template') or k.endswith('_template_head') or k.endswith('_template_tail'): continue
if k.endswith('_skeliton_head') or k.endswith('_skeliton_tail'): continue
if isinstance(v, str):
t = "text"
elif isinstance(v, bool):
t = "boolean"
elif isinstance(v, int):
# enforce type for these fields
if k in ["max_score", "factor", "success_ratio"]:
t = "float"
else:
t = "integer"
if k == "id":
t += " primary key not null"
elif isinstance(v, float):
t = "float"
elif isinstance(v, list):
t = "text"
elif v is None:
continue
elif isinstance(v, dict):
if "id" in v:
t = "integer"
self.analyse(v, k)
k += "_refid"
else:
t = "text"
else:
logging.error("unknown type: %s", type(v))
if k in fields:
if fields[k] != t:
logging.warning("mismatch {:20} type={} found={} value={} of {}".format(k, fields[k], t, repr(v), tablename))
else:
fields[k] = t
def set_logging(verbose):
""" set up a colorized logger """
if sys.stdout.isatty():
logging.addLevelName(logging.DEBUG, "\033[0;32m%s\033[0m" % logging.getLevelName(logging.DEBUG))
logging.addLevelName(logging.INFO, "\033[1;33m%s\033[0m" % logging.getLevelName(logging.INFO))
logging.addLevelName(logging.WARNING, "\033[1;35m%s\033[1;0m" % logging.getLevelName(logging.WARNING))
logging.addLevelName(logging.ERROR, "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.ERROR))
if verbose:
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.DEBUG, datefmt='%H:%M:%S')
else:
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.ERROR, datefmt='%H:%M:%S')
if __name__ == '__main__':
set_logging(True)
menu = hr_db()
menu.load_models()
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
[flake8]
max-line-length = 120
exclude = python/* algorithms/* tutorials/* mathematics/* data-structures/* challenges/* contests/* security/* work/* debug/* build*/* wsl/* regex/* interview-preparation-kit/*
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
#! /usr/bin/env python3
import tkinter as tk
import tkinter.ttk as ttk
import sqlite3
import glob
import json
import platform
import subprocess
import os
import webbrowser
import hrinit
import uuid
def raise_app():
if platform.system() == 'Darwin':
# os.system('''/usr/bin/osascript -e 'tell app "Finder" to set frontmost of process "Python" to true' ''') # noqa
subprocess.call([
'/usr/bin/osascript', '-e',
'tell app "System Events" to set frontmost of every process whose unix id is {} to true'.format(os.getpid()) # noqa
])
# Credit to https://gist.github.com/lukestanley/8525f9fdcb903a43376a35a77575edff
def json_tree(tree, parent, dictionary):
for key in dictionary:
opened = (key == "model" or key == "track")
uid = uuid.uuid4()
if isinstance(dictionary[key], dict):
tree.insert(parent, 'end', uid, text=key, open=opened, tag="d")
json_tree(tree, uid, dictionary[key])
elif isinstance(dictionary[key], list):
tree.insert(parent, 'end', uid, text=key + '[]')
json_tree(tree,
uid,
dict([(i, x) for i, x in enumerate(dictionary[key])]))
else:
value = dictionary[key]
if value is None:
value = 'None'
try:
if isinstance(key, str) and (key.find("_template") != -1
or key.find("_skeliton") != -1 or key == "body_html"):
tree.insert(parent, 'end', uid, text=key, value="<not\ shown>")
else:
tree.insert(parent, 'end', uid, text=key, value=str(value))
except tk.TclError as e:
print(e, key)
tree.insert(parent, 'end', uid, text=key,
value="<" + str(e).replace(' ', '\\ ') + ">")
pass
def show_data(data):
# Setup the root UI
root = tk.Tk()
root.title("JSON viewer")
root.columnconfigure(0, weight=1)
root.rowconfigure(0, weight=1)
# Setup the Frames
tree_frame = ttk.Frame(root, padding="3")
tree_frame.grid(row=0, column=0, sticky=tk.NSEW)
# Setup the Tree
tree = ttk.Treeview(tree_frame, columns='Values')
tree.tag_configure("d", foreground='blue')
tree.column('Values', width=100)
tree.heading('Values', text='Values')
json_tree(tree, '', data)
tree.pack(fill=tk.BOTH, expand=1)
# Limit windows minimum dimensions
root.update_idletasks()
root.minsize(500, 500)
raise_app()
root.mainloop()
class hr_menu:
def __init__(self):
# self.conn = sqlite3.connect('menu.db')
self.conn = sqlite3.connect(':memory:')
def load_models(self):
models = []
c = self.conn.cursor()
c.execute("drop table if exists challenge")
c.execute('''
create table if not exists challenge (
serial integer,
id integer, -- 2532
slug text, -- "solve-me-first"
name text, -- "Solve Me First"
contest_slug text, -- "master"
contest_name text, -- "Master"
category text, -- "ai"
kind text, -- "code"
preview text,
difficulty text, -- "Easy"
track_id integer, -- 3
track_slug text, -- "algorithms"
track_name text, -- "Algorithms"
subtrack_id integer, -- 43
subtrack_slug text, -- "warmup"
subtrack_name text, -- "Warmup"
solved boolean
)''')
c.execute('create unique index if not exists challenge_index on challenge (contest_slug, slug)') # noqa
c.close()
self.conn.commit()
for i in glob.iglob(os.path.join(os.path.dirname(__file__), "offline", "contests", "*.json")): # noqa
with open(i, "rb") as f:
data = json.load(f)
m = data['models']
# filter out interview-preparation-kit and empty contests
if len(m) > 0 and 'id' in m[0]:
for j in m:
j['__file__'] = i # add source filename for error reporting
models.extend(m)
else:
# print("ignoring", i)
pass
c = self.conn.cursor()
sql = '''
insert into challenge (serial,
id, slug, name, contest_slug,
category, kind, preview, difficulty,
track_id, track_slug, track_name,
subtrack_id, subtrack_slug, subtrack_name, solved)
values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)'''
for i, m in enumerate(models):
if 'name' not in m:
continue
try:
fields = (i,
m['id'],
m['slug'],
m['name'],
m['contest_slug'],
m['category'],
m['kind'],
m['preview'],
m['difficulty_name'])
if 'track' in m and m['track']:
fields += (m['track']['track_id'],
m['track']['track_slug'],
m['track']['track_name'],
m['track']['id'],
m['track']['slug'],
m['track']['name'])
path = os.path.join(os.path.dirname(__file__),
m['track']['track_slug'],
m['track']['slug'], m['slug'] + ".*")
else:
if m['contest_slug'] != "projecteuler":
fields += (0, "Contests", "Contests", 0, m['contest_slug'], m['contest_slug']) # noqa
else:
fields += (0, m['contest_slug'], m['contest_slug'], 0, None, None)
path = os.path.join(os.path.dirname(__file__),
m['contest_slug'], m['slug'] + ".*")
fields += (len(glob.glob(path)) != 0,)
except KeyError:
print(m)
raise
try:
c.execute(sql, fields)
except sqlite3.IntegrityError as e:
print(e, m['contest_slug'], m['slug'])
pass
c.close()
self.conn.commit()
def on_event(self, event):
items = self.tree.selection()
if len(items) != 1:
return
item = items[0]
# print("event", event, self.tree.item(item, "text"), str(item))
print(">", str(item))
def get_selected_challenge(self, what=None):
"""
returns the challenge path for its model/statement/source
"""
items = self.tree.selection()
if len(items) != 1:
return
k = str(items[0])
# deux cas:
# domain[/subdomain]
# contest~slug
p = k.split('~')
if len(p) != 2:
return
if p[0] == 'master':
# i.e. master/mathematics/fundamentals/even-odd-query
c = self.conn.cursor()
c.execute("select track_slug,subtrack_slug from challenge where contest_slug='master' and slug=?", (p[1],)) # noqa
r = c.fetchone()
path = os.path.join(p[0], r['track_slug'], r['subtrack_slug'], p[1])
path_source = os.path.join(r['track_slug'], r['subtrack_slug'], p[1])
url = "https://www.hackerrank.com/challenges/{}".format(p[1])
c.close()
else:
# i.e. projecteuler/euler001
path = os.path.join(p[0], p[1])
path_source = path
url = "https://www.hackerrank.com/contests/{}/challenges/{}".format(p[0], p[1])
rootdir = os.path.relpath(os.path.dirname(__file__))
paths = {}
paths["model"] = os.path.join(rootdir, "offline", "models", path + ".json")
paths["statement"] = os.path.join(rootdir, "offline", "statements", p[0], p[1] + ".pdf")
paths["source"] = os.path.join(rootdir, path_source)
paths["url"] = url
paths['model_data'] = None
if os.path.exists(paths["model"]):
data = json.load(open(paths['model']))
if data['status'] is True:
paths['model_data'] = data['model']
if what:
return paths[what]
return paths
def cmd_open_challenge(self, path):
if not path:
return
files = glob.glob(path + ".*")
if len(files) >= 1:
if platform.system() == 'Windows':
subprocess.check_call(["code.cmd", *files])
else:
subprocess.check_call(["code", *files])
def cmd_hrinit(self, lang="*", add_test=True):
path = self.get_selected_challenge("model")
if not path:
return
with open(path) as f:
data = f.read()
p = hrinit.HackerRankParser()
p.feed(data)
p.info()
p.download()
p.gen_stub(lang, add_test=add_test)
def on_popup(self, event):
p = self.current_paths = self.get_selected_challenge()
if not self.current_paths:
return
def state(flag):
# normal/active/disabled
return ["disabled", "active"][flag]
menu_lang = tk.Menu(self.root, tearoff=0)
try:
languages = p['model_data']['languages']
for lang in set(languages) - set(['bash', 'python3', 'cpp14']):
menu_lang.add_command(label=lang)
except KeyError:
languages = []
menu = tk.Menu(self.root, tearoff=0)
menu.add_command(label="Show on HackerRank",
command=lambda: webbrowser.open(p['url']))
menu.add_command(label="Show PDF statement",
state=state(os.path.exists(p['statement'])),
command=lambda: subprocess.call(["open", p['statement']]))
menu.add_separator()
menu.add_command(label="Model details",
state=state(os.path.exists(p['model'])),
command=lambda: show_data(p['model_data']))
menu.add_command(label="Open in Visual Studio Code",
state=state(len(glob.glob(p['source'] + ".*")) != 0),
command=lambda: self.cmd_open_challenge(p['source']))
menu.add_separator()
# add the language section
if len(languages) == 0:
menu.add_command(label="No language available", state="disabled")
elif len(languages) == 1:
menu.add_command(label="Init " + languages[0],
command=lambda: self.cmd_hrinit(languages[0]))
if languages[0] == "text":
# special case when the challenge except the plain text response,
# not a program
menu.add_command(label="Add Python3",
command=lambda: self.cmd_hrinit("python3", False))
menu.add_command(label="Add C++",
command=lambda: self.cmd_hrinit("cpp14", True))
else:
menu.add_command(label="Init Python3",
state=state('python3' in languages),
command=lambda: self.cmd_hrinit("python3"))
menu.add_command(label="Init C++",
state=state('cpp14' in languages),
command=lambda: self.cmd_hrinit("cpp14"))
# menu.add_command(label="Init Bash",
# state=state('bash' in languages),
# command=lambda: self.cmd_hrinit("bash"))
menu.add_cascade(label="Other languages", menu=menu_lang)
try:
menu.tk_popup(event.x_root, event.y_root, 0)
finally:
try:
menu.grab_release()
except tk.TclError:
pass
def show(self):
self.root = root = tk.Tk()
root.wm_title("HackerRank challenges")
content = ttk.Frame(root, padding=(4, 4, 4, 4))
self.tree = tree = ttk.Treeview(content, height=30)
tree.tag_configure("solved", foreground='#37a90c')
tree.heading("#0", text="Name")
tree.column("#0", width=400)
tree["columns"] = ("preview", "difficulty")
tree.column("preview", width=500)
tree.column("difficulty", width=80)
tree.heading("preview", text="preview")
tree.heading("difficulty", text="difficulty")
def dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
self.conn.row_factory = dict_factory
c = self.conn.cursor()
n = 0
prev_track = None
prev_subtrack = None
for r in c.execute("select * from challenge order by track_slug, serial"):
if r['track_slug'] != prev_track:
prev_track = r['track_slug']
prev_subtrack = None
tree.insert("", n, r['track_slug'], text=r['track_name'])
n += 1
if r['subtrack_slug'] != prev_subtrack:
prev_subtrack = r['subtrack_slug']
tree.insert(r['track_slug'], n, r['track_slug'] + "/" + r['subtrack_slug'],
text=r['subtrack_name'])
n += 1
if r['subtrack_slug'] is None:
k = r['track_slug']
else:
k = r['track_slug'] + '/' + r['subtrack_slug']
tag = ""
if r['solved']:
tag = "solved"
tree.insert(k, n, r['contest_slug'] + '~' + r['slug'],
text=r['name'],
values=(r['preview'], r['difficulty']), tag=tag)
n += 1
c.close()
tree.grid(column=0, row=0, sticky=(tk.N, tk.S, tk.E, tk.W))
content.grid(column=0, row=0, sticky=(tk.N, tk.S, tk.E, tk.W))
root.columnconfigure(0, weight=1)
root.rowconfigure(0, weight=1)
content.columnconfigure(0, weight=1)
content.rowconfigure(0, weight=1)
tree.bind("<Double-1>", self.on_event)
if platform.system() == 'Darwin':
tree.bind("<Button-2>", self.on_popup)
else:
tree.bind("<Button-3>", self.on_popup)
raise_app()
root.mainloop()
if __name__ == '__main__':
menu = hr_menu()
menu.load_models()
menu.show()
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
# Build and test HackerRank solutions
#
cmake_minimum_required(VERSION 3.5)
project(hackerrank)
#
# Options
#
option(HACKERRANK_FP "Toggle Functional Programming" OFF)
option(HACKERRANK_JAVA "Toggle Java" ON)
option(HACKERRANK_JS "Toggle Javascript (Node.js)" ON)
#
# Check environment
#
if(HACKERRANK_JAVA)
find_program(JAVAC_BIN javac DOC "path to Java compiler")
if(NOT JAVAC_BIN)
set(HACKERRANK_JAVA OFF)
message(WARNING "Disabling Java: compiler not found")
else()
#message(STATUS "Java compiler: ${JAVAC_BIN}")
endif()
endif()
if(HACKERRANK_JS)
find_program(NODEJS_BIN node DOC "path to Node.js runtime")
if(NOT NODEJS_BIN)
set(HACKERRANK_JS OFF)
message(WARNING "Disabling Node.js: runtime not found")
else()
#message(STATUS "Node.js runtime: ${NODEJS_BIN}")
endif()
endif()
#
# Compilation settings
#
set(CMAKE_EXPORT_COMPILE_COMMANDS on)
if(HACKERRANK_FP)
set(CMAKE_MODULE_PATH "${CMAKE_MODULE_PATH};${CMAKE_SOURCE_DIR}/cmake-haskell/cmake")
endif()
if(HACKERRANK_JAVA)
find_package(Java REQUIRED)
include(UseJava)
endif()
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_C_STANDARD 11)
if((CMAKE_CXX_COMPILER_ID MATCHES "Clang") OR (CMAKE_CXX_COMPILER_ID MATCHES "GNU"))
# enforce a clean code
set(CMAKE_CXX_FLAGS "-Wall -Wextra -Wconversion -Wno-unused-result")
set(CMAKE_C_FLAGS "-Wall -Wextra -Wconversion -Wno-unused-result")
endif()
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
# clang doesn't have the handy gcc STL header file
configure_file(stdc++.h.in bits/stdc++.h COPYONLY @ONLY)
include_directories(${CMAKE_CURRENT_BINARY_DIR})
endif()
enable_testing()
#
# Macros
#
# target to extract the testcases
add_custom_target(extract-testcases
COMMAND ${CMAKE_SOURCE_DIR}/runtest.sh -X ${CMAKE_BINARY_DIR}/tests
WORKING_DIRECTORY ${CMAKE_BINARY_DIR})
# run a challenge
set(HACKERRANK_CONTEST master)
macro(add_test_hackerrank name)
add_test(NAME ${name}
COMMAND ${CMAKE_SOURCE_DIR}/runtest.sh -t ${name} -c ${HACKERRANK_CONTEST} -T ${CMAKE_BINARY_DIR}/tests
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})
set_tests_properties(${name} PROPERTIES TIMEOUT 30)
endmacro()
# add a C/C++ challenge
macro(add_hackerrank name src)
add_executable(${name} ${src})
add_test_hackerrank(${name})
endmacro(add_hackerrank name src)
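# Usage sketch (such calls are appended to a directory's CMakeLists.txt by hrinit.py;
# <slug> stands for a challenge slug and is not a real name):
#   add_hackerrank(<slug> <slug>.cpp)   # build the C/C++ solution and register its test
#   dirty_cpp(<slug>)                   # optionally relax the warning flags for that target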
# hackerrank problem setters really should improve their code quality
macro(dirty_cpp name)
target_compile_options("${name}" PRIVATE "-Wno-conversion")
#target_compile_options("${name}" PRIVATE "-Wno-sign-conversion")
target_compile_options("${name}" PRIVATE "-Wno-sign-compare")
target_compile_options("${name}" PRIVATE "-Wno-unused-parameter")
target_compile_options("${name}" PRIVATE "-Wno-reorder")
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
target_compile_options("${name}" PRIVATE "-Wno-shorten-64-to-32")
endif()
endmacro(dirty_cpp name)
# add a Python challenge
macro(add_hackerrank_py name)
configure_file("${name}" "${name}" COPYONLY @ONLY)
add_test_hackerrank(${name})
endmacro(add_hackerrank_py name)
# add a Shell challenge
macro(add_hackerrank_shell name)
configure_file("${name}" "${name}" COPYONLY @ONLY)
add_test_hackerrank(${name})
endmacro(add_hackerrank_shell name)
# add a Java challenge (only if option is on)
function(add_hackerrank_java src)
if(HACKERRANK_JAVA)
cmake_parse_arguments(_hr_java
""
"CLASS"
""
${ARGN}
)
if(NOT DEFINED _hr_java_CLASS)
set(_hr_java_CLASS Solution)
endif()
get_filename_component(name ${src} NAME_WE)
set(JAR_NAME ${name}_java)
set(JAVA_SOURCE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/${name}_java)
file(MAKE_DIRECTORY ${JAVA_SOURCE_DIRECTORY})
set(JAVA_SOURCE_FILES ${JAVA_SOURCE_DIRECTORY}/${_hr_java_CLASS}.java)
configure_file(${src} ${JAVA_SOURCE_FILES} COPYONLY @ONLY)
set(JAVA_DAI_MANIFEST ${CMAKE_CURRENT_BINARY_DIR}/${name}_java/manifest)
file(WRITE ${JAVA_DAI_MANIFEST} "Main-Class: ${_hr_java_CLASS}\n")
add_jar(${JAR_NAME} ${JAVA_SOURCE_FILES} MANIFEST ${JAVA_DAI_MANIFEST}
OUTPUT_NAME ${name})
add_test_hackerrank(${name}.jar)
endif()
endfunction()
# add a Node.js challenge
function(add_hackerrank_js name)
if(HACKERRANK_JS)
configure_file("${name}" "${name}" COPYONLY @ONLY)
add_test_hackerrank(${name})
endif()
endfunction()
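# Usage sketch for the other language wrappers, as generated by hrinit.py
# (<slug> is a placeholder):
#   add_hackerrank_py(<slug>.py)
#   add_hackerrank_shell(<slug>.sh)
#   add_hackerrank_java(<slug>.java)    # CLASS defaults to Solution
#   add_hackerrank_js(<slug>.js)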
#
# Contests and domains
#
add_subdirectory(mathematics)
add_subdirectory(algorithms)
add_subdirectory(data-structures)
add_subdirectory(cpp)
add_subdirectory(python)
add_subdirectory(shell)
add_subdirectory(c)
add_subdirectory(java)
add_subdirectory(security)
add_subdirectory(regex)
add_subdirectory(tutorials)
add_subdirectory(contests)
add_subdirectory(interview-preparation-kit)
if(HACKERRANK_FP)
add_subdirectory(fp)
endif()
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
#! /bin/bash
# update testcases archive and sync GitHub repo
which -s python3 || { echo "Missing Python3"; exit 2; }
cwd="$(cd $(dirname $0); pwd)"
cd "${cwd}"
GITHUB_REPO=../GitHub/hackerrank
DOMAINS=($(python3 -c 'import yaml;print(*yaml.safe_load(open(".hr_conf.yaml"))["domains"])'))
COLOR_LIGHT_RED="\033[1;31m"
COLOR_LIGHT_GREEN="\033[1;32m"
COLOR_YELLOW="\033[1;33m"
COLOR_LIGHT_PURPLE="\033[1;35m"
COLOR_END="\033[0m"
cmd_testcases_archive()
{
# do not rebuild the archive if no testcases have been downloaded since last build
if python3 hr_count.py --latest; then
echo -e "${COLOR_LIGHT_PURPLE}Testcases archive is up-to-date${COLOR_END}"
return
fi
echo -e "${COLOR_LIGHT_PURPLE}Preparing testcases...${COLOR_END}"
rm -rf tmp
for i in testcases/*/*-testcases.zip ; do
contest=$(basename $(dirname $i))
mkdir -p tmp/$contest
if [ $contest = master ]; then
unzip -q -o -d tmp/$contest/$(basename $i -testcases.zip) $i
if [ $? -eq 0 ]; then
echo -n .
else
echo -n X
fi
else
# contests ship the full set of testcases: too big!
unzip -q -o -d tmp/$contest/$(basename $i -testcases.zip) $i input/input00.txt output/output00.txt 2>/dev/null
if [ $? -ne 0 ]; then
# but testcase 00 is not always there...
unzip -q -o -d tmp/$contest/$(basename $i -testcases.zip) $i
if [ $? -eq 0 ]; then
echo -n 2
else
echo -n Y
fi
else
echo -n 1
fi
fi
done
# if testcases are missing, look for them in the "purchased" version
for i in testcases/*/*-testcases.err ; do
contest=$(basename $(dirname $i))
mkdir -p tmp/$contest
t=testcases2/$contest/$(basename $i .err).zip
if [ -f $t ]; then
unzip -q -o -d tmp/$contest/$(basename $i -testcases.err) $t
echo -n .
else
echo
echo "No testcase for $t"
fi
done
# add the extra testcases
# for i in testcases_extra/*/*-testcases.zip ; do
# contest=$(basename $(dirname $i))
# mkdir -p tmp/$contest
# unzip -q -o -d tmp/$contest/$(basename $i -testcases.zip) $i
# if [ $? -eq 0 ]; then
# echo -n 3
# else
# echo -n Z
# fi
# done
echo
echo -e "${COLOR_LIGHT_PURPLE}Generating testcases archive...${COLOR_END}"
find tmp -name "*.txt" | cut -b5- | xargs tar -C tmp -cJf testcases.tar.xz
count=$(($(ls -d tmp/*/* | wc -l)))
rm -rf tmp
echo " testcases count: $count"
}
# count challenges and update the main README.md
#
cmd_count()
{
echo -e "${COLOR_LIGHT_PURPLE}Counting...${COLOR_END}"
counts=($(python3 hr_count.py $@))
echo " challenges: ${counts[0]} (unique)"
echo " challenges: ${counts[1]} (overall)"
echo " solutions: ${counts[2]} (for all languages)"
#echo " solutions: ${counts[3]} (overall)"
challenges=${counts[0]}
sed -e "s/[[:digit:]]*\( solutions and counting\)/"$challenges"\1/" \
-e "s/\(Challenges\-\)[[:digit:]]*\(-blue\)/\1"$challenges"\2/" README.md > README.md.tmp
if diff -qs README.md README.md.tmp > /dev/null; then
rm README.md.tmp
else
echo " rewrite README.md"
mv -f README.md.tmp README.md
fi
}
# generates all index README.md
#
cmd_readme()
{
echo -e "${COLOR_LIGHT_PURPLE}Generating README...${COLOR_END}"
python3 ./hr_table.py
}
# build and test challenges
#
cmd_build_test()
{
echo -e "${COLOR_LIGHT_PURPLE}Build and test...${COLOR_END}"
[ "$1" = "commit" ] && echo -e "${COLOR_YELLOW}Will git-commit if ok${COLOR_END}"
if [ $(uname) = Darwin ] ; then
nproc()
{
sysctl -n hw.logicalcpu
}
fi
(
cd "${cwd}"
gh_src="$(cd ${GITHUB_REPO}; pwd)"
mkdir -p build/github
cd build/github
cmake -DHACKERRANK_FP:BOOL=OFF -DCMAKE_BUILD_TYPE=Release "${gh_src}"
make -j$(nproc)
make extract-testcases
ctest -j$(nproc) --output-on-failure
success=$?
echo
if [ ${success} -eq 0 ]; then
echo -e "${COLOR_LIGHT_GREEN}Hurrah! Everything's fine :)${COLOR_END}"
cd "${gh_src}"
if [ "$1" = "commit" ] ; then
if [ "$2" = "" ] ; then
msg="auto commit $(date +'%h %d %H:%M')"
else
msg="$2"
fi
echo
git commit -a -m "${msg}"
fi
else
echo -e "${COLOR_LIGHT_RED}Something goes wrong :(${COLOR_END}"
fi
)
}
# sync the current repo to the public one
#
cmd_rsync()
{
echo -e "${COLOR_LIGHT_PURPLE}Rsync...${COLOR_END}"
# copy the files to the GitHub repository
rsync -av --delete --exclude .DS_Store --exclude="*.tmp" --exclude="*.nosync" --exclude="tests" \
${DOMAINS[*]} \
coding-dojo \
LICENSE \
.vscode CMakeLists.txt requirements.txt README.md stdc++.h.in runtest.sh compare.py hrinit.py hrtc2.py \
testcases.tar.xz \
setup.cfg \
.travis.yml _config.yml \
hr_count.py hr_github.sh hr_interview.py hr_offline.py hr_table.py Makefile .hr_conf.yaml \
hr_db.py hr_menu.py \
${GITHUB_REPO}
}
# help/usage
#
cmd_usage()
{
echo "Usage: $0 [options]"
echo " -h help"
echo " -t make testcases archive"
echo " -b build and run tests"
echo " -a [commit [<message>]] make archive then build and test, then optionally git-commit"
echo
echo "Without option, update README.md files and sync repo."
exit 0
}
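# Example invocations (illustrative; the commit message is made up):
#   ./hr_github.sh                          # regenerate README files, update counters, rsync
#   ./hr_github.sh -t                       # same, plus rebuild of the testcases archive
#   ./hr_github.sh -a commit "weekly sync"  # archive + build/test, then git-commit on success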
# main
#
opt_archive=
opt_build=
while getopts "htbaX:" option; do
# echo option=$option OPTARG=$OPTARG OPTIND=$OPTIND OPTERR=$OPTERR OPTSTRING=$OPTSTRING
case "${option}" in
h|\?) cmd_usage ;;
t) opt_archive=1 ;;
b) opt_build=1 ;;
a) opt_archive=1 ; opt_build=1 ;;
X) shift $((OPTIND-1)) ; cmd_${OPTARG} "$@"; exit 0 ;; # run individual command
esac
done
shift $((OPTIND-1))
cmd_readme
cmd_count ${DOMAINS[*]}
[ $opt_archive ] && cmd_testcases_archive
cmd_rsync
[ $opt_build ] && cmd_build_test "$@"
echo
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
#!/bin/bash
#set -x
#echo $* >> runtest-cmdline.txt
# program parameters
rootdir=$(dirname "$0")
contest=master
quiet=
number= # testcase number (default 0)
testsdir= # path for testcases files (<tests>/<contest>/<challenges>/input/...)
extract_tests=
# program usage
usage()
{
echo "Usage: runtest.sh [options]"
echo " -h,--help : help"
echo " -t,--testcase : testcase name"
echo " -n,--number : testcase number"
exit $1
}
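# Example (assumed to run from the build directory where the solution was copied):
#   ./runtest.sh -c master -n 0 -t solve-me-first.py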
# colors for term
set_colors()
{
if [ -t 1 -a -t 0 ]; then
COLOR_RED="\033[91m"
COLOR_GREEN="\033[92m"
COLOR_YELLOW="\033[93m"
COLOR_LIGHT_PURPLE="\033[94m"
COLOR_PURPLE="\033[95m"
COLOR_CYAN="\033[0;36m"
COLOR_END="\033[0m"
else
COLOR_RED=
COLOR_GREEN=
COLOR_YELLOW=
COLOR_LIGHT_PURPLE=
COLOR_PURPLE=
COLOR_END=
fi
}
# compare command
compare()
{
python3 "${rootdir}/compare.py" "$1" "$2"
}
close_std()
{
if [ $CTEST_INTERACTIVE_DEBUG_MODE ]; then
# close stdout and stderr
exec 1<&-
exec 2<&-
# open stdout as a file for read and write.
exec 1<>$testname.log
# redirect stderr to stdout
exec 2>&1
fi
}
##############################################################################
# read the options
if [ "$(uname)" = "Darwin" ]; then
ARGS=`getopt hqc:t:n:T:X: $*`
else
ARGS=`getopt -o hqc:t:n:T:X: --long help,quiet,contest:,test:,number: -n 'runtest.sh' -- "$@"`
fi
eval set -- "$ARGS"
[ $? != 0 ] && usage 2
# extract options and their arguments into variables.
for i ; do
case "$i" in
-h|--help) usage ;;
-q|--quiet) quiet=1 ; shift ;;
-c|--contest) contest=$2 ; shift 2 ;;
-t|--test) testname=$2 ; shift 2 ;;
-n|--number) number=$2 ; shift 2 ;;
-T) testsdir=$2 ; shift 2 ;;
-X) extract_tests=$2 ; shift 2;;
--) shift ; break ;;
esac
done
if [ $extract_tests ]; then
if [ -s "${rootdir}/testcases.tar.xz" ]; then
mkdir -p "${extract_tests}"
tar -C "${extract_tests}" -xJf "${rootdir}/testcases.tar.xz"
exit $?
fi
exit 1
fi
# alternate syntax
if [ -z "${testname}" ]; then
testname=$1
[ -z "${testname}" ] && usage 3
fi
# testcase file must exist
[ -f "${testname}" ] || usage 4
# for batch/interactive processing
set_colors
close_std
# extract the extension
extension="${testname##*.}"
if [ "${extension}" == "py" ]; then
exe="python3 ${testname}"
testname="${testname%.*}"
result=result${extension}
elif [ "${extension}" == "sh" ]; then
exe="bash ${testname}"
testname="${testname%.*}"
result=result${extension}
elif [ "${extension}" == "jar" ]; then
exe="java -jar ${testname}"
testname="${testname%.*}"
result=result${extension}
elif [ "${extension}" == "js" ]; then
exe="node ${testname}"
testname="${testname%.*}"
result=result${extension}
else
exe=./${testname}
result=result-${testname}
# special case for duplicated challenges (practice/contest)
testname=${testname%_*}
fi
##############################################################################
# three locations are considered:
# - the directory <tests>/<testname>/input/ exists
# - the file <rootdir>/testcases/<contest>/<testname>-testcases.zip
# - the file <rootdir>/testcases2/<contest>/<testname>-testcases.zip
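# e.g. (illustrative) for contest=master and testname=solve-me-first, the code below tries:
#   ${testsdir}/master/solve-me-first/          (already extracted tests, via -T)
#   testcases/master/solve-me-first-testcases.zip
#   testcases2/master/solve-me-first-testcases.zip
#   offline/testcases/master/solve-me-first-testcases.zip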
if [ "${testsdir}" != "" -a -d "${testsdir}/${contest}/${testname}" ]; then
testsdir="${testsdir}/${contest}"
else
testsdir=tests
mkdir -p tests
# if the testcase archives exist: extract them
zip="${rootdir}/testcases/${contest}/${testname}-testcases.zip"
if [ -s "${zip}" ]; then
unzip -q -o -d tests/${testname} "${zip}"
fi
zip="${rootdir}/testcases2/${contest}/${testname}-testcases.zip"
if [ -s "${zip}" ]; then
unzip -q -o -d tests/${testname} "${zip}"
fi
zip="${rootdir}/offline/testcases/${contest}/${testname}-testcases.zip"
if [ -s "${zip}" ]; then
unzip -q -o -d tests/${testname} "${zip}"
fi
fi
# the root folder for inputs, outputs and results
testdir=${testsdir}/${testname}
# if the testcases directory is missing, it is an error
if [ ! -d "${testdir}/input" ]; then
echo -e "${COLOR_RED}MISSING TESTCASES${COLOR_END}"
exit 1
fi
##############################################################################
failure=0
for input in "${testdir}/input/input"*.txt; do
n=${input##*input}
n=${n%%.txt}
if [ ! -z "$number" -a "$number" != "a" ]; then
if [ "$number" -ne "${n}" ]; then
continue
fi
fi
echo -e "${COLOR_YELLOW}${exe} < ${input}${COLOR_END}"
exec 3>&2 # fd 3 is stderr too
exec 2> "${testdir}/${result}${n}.time" # builtin time will write to a file, not stderr
TIMEFORMAT="${COLOR_CYAN}(real %2R user %2U sys %2S)${COLOR_END}"
# old templates use the environment variable OUTPUT_PATH
export OUTPUT_PATH=/dev/stdout
if [ $quiet ]; then
time ${exe} < "${input}" 2>&3 > "${testdir}/${result}${n}.txt"
else
time ${exe} < "${input}" 2>&3 | tee "${testdir}/${result}${n}.txt"
fi
exec 2>&3 # restore stderr
exec 3<&- # close fd 3
elapsed=$(< ${testdir}/${result}${n}.time)
echo -ne "${COLOR_PURPLE}"
compare "${testdir}/${result}${n}.txt" "${testdir}/output/output${n}.txt"
rc=$?
echo -ne "${COLOR_END}"
[ $rc -ne 0 ] && failure=1
if [ $rc -eq 0 ] ; then
echo -e "${COLOR_YELLOW}TESTCASE ${n} : ${COLOR_GREEN}SUCCESS${COLOR_END} ${elapsed}"
else
echo -e "${COLOR_YELLOW}TESTCASE ${n} : ${COLOR_RED}FAILURE${COLOR_END} ${elapsed}"
fi
echo
done
if [ $failure -eq 0 ] ; then
echo -e "${COLOR_GREEN}SUCCESS${COLOR_END}"
else
echo -e "${COLOR_RED}FAILURE${COLOR_END}"
fi
exit $failure
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
#! /usr/bin/env python3
# count the testcases and challenges
import argparse
import os
import glob
import yaml
import pathlib
import datetime
domains = yaml.safe_load(open(os.path.join(os.path.dirname(__file__), ".hr_conf.yaml")))["domains"]
parser = argparse.ArgumentParser(description='Count challenges')
parser.add_argument('--latest', help="check if testcases.tar.xz is up to date", action='store_true')
parser.add_argument('-m', '--missing', help="print missing testcases", action='store_true')
parser.add_argument('--copy', help="copy missing testcases", action='store_true')
parser.add_argument('-x', '--extra', help="print extra testcases", action='store_true')
parser.add_argument('--delete', help="remove extra testcases", action='store_true')
parser.add_argument('-v', '--verbose', help="verbose", action='store_true')
parser.add_argument('domain', nargs='*', help="domain", default=domains)
args = parser.parse_args()
if args.latest:
# find the latest modification/creation time in the testcases folders
latest_t = 0
latest_f = None
for folder in ['testcases', 'testcases2']:
path = os.path.join(os.path.dirname(__file__), folder)
for f in pathlib.Path(path).glob('**/*'):
# if f.is_dir(): continue
st = f.lstat()
t = max(st.st_ctime, st.st_mtime)
# t = st.st_mtime
if t > latest_t:
latest_t = t
latest_f = f
# get the modification time for the archive
tar_f = os.path.join(os.path.dirname(__file__), "testcases.tar.xz")
if os.path.exists(tar_f):
tar_t = os.lstat(tar_f).st_mtime
else:
tar_t = 0
if args.verbose:
print("{} {}".format(datetime.datetime.fromtimestamp(latest_t).ctime(), latest_f))
print("{} {}".format(datetime.datetime.fromtimestamp(tar_t).ctime(), tar_f))
if tar_t < latest_t:
if args.verbose:
print("out of date")
exit(1)
exit(0)
challenges = set()
challenges_with_contest = set()
solutions = set()
count = 0
for d in args.domain:
for f in glob.iglob(os.path.join(os.path.dirname(__file__), d, "**"), recursive=True):
if os.path.isdir(f) or not os.path.exists(f):
continue
filename = os.path.basename(f)
# no elegant way found yet to exclude the solution directories
if "/js10-create-a-button/" in f or "/js10-buttons-container/" in f or '/js10-binary-calculator/' in f: # noqa
continue
contest = os.path.relpath(f, os.path.dirname(__file__)).split(os.path.sep)
contest = contest[1] if contest[0] == "contests" else "master"
if filename == 'README.md' or filename == 'CMakeLists.txt':
continue
slug, ext = os.path.splitext(filename)
if ext == ".hpp" or ext == ".lst":
continue
challenges_with_contest.add((contest, slug))
challenges.add(slug)
solutions.add(filename)
count += 1
if args.verbose:
print(os.path.relpath(os.path.dirname(f)), slug)
if args.missing:
t1 = os.path.join(os.path.dirname(__file__),
"testcases", contest, slug + "-testcases.zip")
t2 = os.path.join(os.path.dirname(__file__),
"testcases2", contest, slug + "-testcases.zip")
if not os.path.exists(t1) and not os.path.exists(t2):
if ext in ['.sql', '.txt']:
# print("no needed:", os.path.relpath(os.path.dirname(f)), slug)
pass
else:
print("missing testcases:", os.path.relpath(os.path.dirname(f)), slug)
# os.system("cp -p offline/testcases/{}-testcases.zip testcases/".format(slug))
if args.copy:
os.link("offline/testcases/{}/{}-testcases.zip".format(contest, slug),
"testcases/{}/{}-testcases.zip".format(contest, slug))
if args.extra:
root = os.path.join(os.path.dirname(__file__), "testcases")
def nom(p):
p = os.path.relpath(p, root)
c = os.path.dirname(p)
p = os.path.basename(p)
assert p.find("-testcases.") > 0
return c, p[:p.find("-testcases.")]
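# e.g. nom("testcases/master/foo-testcases.zip") -> ("master", "foo")
# (assuming the script runs from its own directory; "foo" is a placeholder slug)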
t1 = os.path.join(root, "**", "*-testcases.*")
testcases = glob.glob(t1, recursive=True)
extras = set([nom(i) for i in testcases])
print("nb testcases:", len(extras))
print(*['/'.join(i) for i in set(extras) - challenges_with_contest])
if args.delete:
for i in testcases:
if nom(i) not in challenges_with_contest:
os.unlink(i)
print(len(challenges), len(challenges_with_contest), len(solutions), count)
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
# Makefile
# some handy commands
help:
@echo "make sync : readme, count and sync GitHub repo"
@echo "make github : sync + testcases"
@echo "make test : github + run tests (for GitHub repo source tree)"
@echo "make cloc : count lines of code"
sync:
@./hr_github.sh
github:
@./hr_github.sh -t
test:
@./hr_github.sh -a
build:
@mkdir -p build && cd build && cmake -DHACKERRANK_FP:BOOL=OFF -DCMAKE_BUILD_TYPE=Debug .. && make -j2
clean:
rm -rf build
cloc:
@cloc --exclude-dir=.vscode --vcs git
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
// replacement <bits/stdc++.h> for clang
// https://gcc.gnu.org/git/?p=gcc.git;a=blob;f=libstdc%2B%2B-v3/include/precompiled/stdc%2B%2B.h;h=adcd7bf312c849dffcaca871f91c4b12c9edcb29;hb=HEAD
#include <map>
#include <set>
#include <list>
#include <cmath>
#include <ctime>
#include <deque>
#include <queue>
#include <stack>
#include <string>
#include <bitset>
#include <cstdio>
#include <limits>
#include <vector>
#include <climits>
#include <cstring>
#include <cstdlib>
#include <fstream>
#include <numeric>
#include <sstream>
#include <iostream>
#include <algorithm>
#include <unordered_map>
#include <numeric>
#include <array>
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
#! /usr/bin/env python3
# list the sections of interview-preparation-kit
# create symbolic links to the challenges already solved elsewhere
import os
import requests
import requests_cache
import glob
import argparse
from hrinit import Colors
import sys
import logging
import datetime
from collections import defaultdict
class Interview:
def __init__(self):
self.basedir = os.path.dirname(__file__)
self.session = requests.Session()
def get(self, playlist, content=None):
if content:
url = 'https://www.hackerrank.com/rest/playlists/{}/{}'.format(playlist, content)
else:
url = 'https://www.hackerrank.com/rest/playlists/{}'.format(playlist)
return self.session.get(url).json()
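# e.g. get('interview-preparation-kit') returns the playlist of playlists,
# get('<section-slug>', 'challenges') the challenges of one section
# (<section-slug> is a placeholder)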
def run(self, create_links=False):
# get challenges from algorithms, data-structures, tutorials
# (interview-preparation-kit ones are picked among them)
files = defaultdict(lambda: [])
for i in ['algorithms', 'data-structures', 'tutorials']:
pattern = os.path.join(self.basedir, i, "**", "*.*")
for j in glob.iglob(pattern, recursive=True):
if not os.path.isfile(j):
continue
path, name = os.path.split(j)
slug, lang = os.path.splitext(name)
if name == "CMakeLists.txt" or name == "README.md" or lang == ".hpp":
continue
files[slug].append(j)
# get the playlist of playlists
data = self.get('interview-preparation-kit')
name = data['name']
print("{}{}{}".format(Colors.BLUE, name, Colors.END))
for playlist in data['playlists']:
print(" {}{}{}".format(Colors.LIGHT_BLUE, playlist['name'], Colors.END))
section_dir = os.path.join(self.basedir, 'interview-preparation-kit', playlist['slug'])
os.makedirs(section_dir, exist_ok=True)
if playlist['videos_count'] != 0:
for video in self.get(playlist['slug'], 'videos')['videos']:
# Note: the duration field is in ISO-8601 duration format
print(" {}{:60}{} http://youtu.be/{}".format(
Colors.LIGHT_CYAN, video['title'], Colors.END, video['youtube_id']))
if playlist['challenges_count'] == 0:
print(" {}{}{}".format(Colors.LIGHT_RED, "no challenge", Colors.END))
else:
challenges = self.get(playlist['slug'], 'challenges')
for challenge in challenges['challenges']:
print(" {}{}{}".format(Colors.GREEN, challenge['name'], Colors.END))
slug = challenge['slug']
if slug in files:
for i in files[slug]:
dest = os.path.join(section_dir, os.path.basename(i))
src = os.path.relpath(i, section_dir)
if not os.path.exists(dest):
print(" LINK", src)
if create_links:
os.symlink(src, dest)
def set_logging(verbose):
""" set up a colorized logger """
if sys.stdout.isatty():
logging.addLevelName(logging.DEBUG, "\033[0;32m%s\033[0m" % logging.getLevelName(logging.DEBUG))
logging.addLevelName(logging.INFO, "\033[1;33m%s\033[0m" % logging.getLevelName(logging.INFO))
logging.addLevelName(logging.WARNING, "\033[1;35m%s\033[1;0m" % logging.getLevelName(logging.WARNING))
logging.addLevelName(logging.ERROR, "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.ERROR))
if verbose:
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.DEBUG, datefmt='%H:%M:%S')
else:
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.ERROR, datefmt='%H:%M:%S')
def set_cache(refresh=False):
""" install the static Requests cache """
if refresh:
expire_after = datetime.timedelta(seconds=0)
else:
expire_after = datetime.timedelta(days=30)
requests_cache.install_cache(
cache_name=os.path.join(os.path.dirname(__file__), "cache"),
allowable_methods=('GET', 'POST'), expire_after=expire_after)
requests_cache.core.remove_expired_responses()
def main():
parser = argparse.ArgumentParser(description='Offliner for Interview Preparation Kit')
parser.add_argument("-v", "--verbose", help="increase verbosity", action='store_true')
parser.add_argument('-R', '--refresh', help="refresh the catalogs (do not use cache)", action="store_true") # noqa
parser.add_argument('-l', '--links', help="create symlinks", action='store_true')
args = parser.parse_args()
set_logging(args.verbose)
set_cache(args.refresh)
interview = Interview()
interview.run(args.links)
if __name__ == '__main__':
main()
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
#! /usr/bin/env python3
import sys
import requests
import requests_cache
import os
import subprocess
import json
import argparse
import platform
import re
import email.utils
import datetime
import time
import logging
class Colors:
""" Terminal colors """
BLACK = "\033[0;30m"
RED = "\033[0;31m"
GREEN = "\033[0;32m"
BROWN = "\033[0;33m"
BLUE = "\033[0;34m"
PURPLE = "\033[0;35m"
CYAN = "\033[0;36m"
LIGHT_GRAY = "\033[0;37m"
DARK_GRAY = "\033[1;30m"
LIGHT_RED = "\033[1;31m"
LIGHT_GREEN = "\033[1;32m"
YELLOW = "\033[1;33m"
LIGHT_BLUE = "\033[1;34m"
LIGHT_PURPLE = "\033[1;35m"
LIGHT_CYAN = "\033[1;36m"
LIGHT_WHITE = "\033[1;37m"
BOLD = "\033[1m"
FAINT = "\033[2m"
ITALIC = "\033[3m"
UNDERLINE = "\033[4m"
BLINK = "\033[5m"
NEGATIVE = "\033[7m"
CROSSED = "\033[9m"
END = "\033[0m"
# cancel SGR codes if we don't write to a terminal
if not __import__("sys").stdout.isatty():
for c in dir():
if isinstance(c, str) and c[0:2] != "__":
locals()[c] = ""
class HackerRankParser():
def __init__(self, debug=False, rootdir=None):
self.debug = debug
if rootdir is None:
self.rootdir = os.path.dirname(__file__)
else:
self.rootdir = rootdir
self.model = None
self.path = None
self.contest = None
self.key = None
def feed(self, data, ignore_path=False):
if self.debug:
with open("model.json", "w") as f:
f.write(data)
print("DEBUG: write model.json")
data = json.loads(data)
if data['status'] is True:
self.model = m = data['model']
if m['track'] is None:
if m["primary_contest"] is None:
# challenge is not categorized (not a contest, no track)
if ignore_path:
self.path = "master"
self.path_name = "master"
else:
print("Cannot determine path for challenge {}".format(m['name']))
exit()
else:
self.path = os.path.join("contests", m["contest_slug"])
self.path_name = "{}".format(m["primary_contest"]["name"])
else:
self.path = os.path.join(m["track"]["track_slug"], m["track"]["slug"])
self.path_name = "{} > {}".format(m["track"]["track_name"], m["track"]["name"])
self.contest = m['contest_slug']
self.key = m['slug']
if 'id' in m:
self.challenge_id = m['id']
else:
self.challenge_id = None
if m['contest_slug'] == "master":
self.url = "https://www.hackerrank.com/challenges/{}/problem".format(self.key)
self.url2 = None
if 'primary_contest' in m:
if m['primary_contest'] and 'slug' in m['primary_contest']:
self.url2 = "https://www.hackerrank.com/contests/{}/challenges/{}".format(m['primary_contest']['slug'], self.key) # noqa
else:
self.url = "https://www.hackerrank.com/contests/{}/challenges/{}".format(self.contest, self.key) # noqa
self.url2 = None
def info(self):
print(Colors.LIGHT_BLUE + "key :" + Colors.END, self.model['slug'])
print(Colors.LIGHT_BLUE + "name :" + Colors.END, self.model['name'])
print(Colors.LIGHT_BLUE + "domain :" + Colors.END, self.path_name)
print(Colors.LIGHT_BLUE + "preview :" + Colors.END, self.model['preview'])
print(Colors.LIGHT_BLUE + "lang :" + Colors.END, ','.join(self.model['languages']))
def gen_stub(self, lang, overwrite=False, hpp=False, editor=True, add_test=True):
""" create a file based on the hackerrank template with a significant header """
EXTENSIONS = {"cpp": "cpp",
"cpp14": "cpp",
"c": "c",
"python3": "py",
"python": "py",
"haskell": "hs",
"bash": "sh",
"java": "java",
"java8": "java",
"javascript": "js",
"perl": "pl",
"lua": "lua",
"text": "txt",
"oracle": "sql"}
PREFERED = ['python3', 'cpp14', 'c', 'haskell',
'bash', 'oracle', 'text', 'java8',
'python', 'javascript']
# auto choose the language
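# e.g. (hypothetical model) with languages == ["java8", "cpp14", "python3"],
# lang == "*" resolves to "python3", the first match in PREFERED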
if lang == "*":
if 'languages' in self.model:
languages = self.model['languages']
if len(languages) == 1:
lang = languages[0]
else:
for i in PREFERED:
if i in languages:
lang = i
break
else:
print("Cannot choose automatically a language:", ' '.join(languages))
return
else:
print('Model unknown: no languages[]')
return
extension = EXTENSIONS.get(lang, lang)
os.makedirs(os.path.join(self.rootdir, self.path), exist_ok=True)
filename = os.path.join(self.rootdir, self.path, self.key + "." + extension)
if not overwrite and os.path.exists(filename):
print("File exists:", filename)
return
cmake = os.path.join(self.rootdir, self.path, "CMakeLists.txt")
def write_header(f, comment, add_skeliton=True):
def line(text=None):
if text is None:
f.write('\n')
else:
f.write(comment + text + '\n')
def skeliton(what):
text = self.model.get(lang + "_" + what, "").strip()
if text != "":
if what.find("_tail") != -1:
line()
line('(' + what + ') ' + '-' * 70)
f.write(text + '\n')
if what.find("_head") != -1:
line('(' + what + ') ' + '-' * 70)
line()
return True
else:
return False
line(self.path_name + " > " + self.model['name'])
if 'preview' in self.model:
line(self.model['preview'])
line('')
line('{}'.format(self.url))
if self.url2:
line('{}'.format(self.url2))
if self.challenge_id:
line('challenge id: {}'.format(self.challenge_id))
line('')
line()
if add_skeliton:
skeliton("skeliton_head") or skeliton("template_head")
skeliton("template")
skeliton("skeliton_tail") or skeliton("template_tail")
# languages with a test driver
if lang == "cpp" or lang == "cpp14" or lang == "c":
if hpp:
filename_hpp = os.path.splitext(filename)[0] + ".hpp"
with open(filename, "wt") as f:
write_header(f, '// ', add_skeliton=False)
f.write('\n')
f.write('#include "{}"\n'.format(os.path.basename(filename_hpp)))
with open(filename_hpp, "wt") as f:
write_header(f, '// ', add_skeliton=True)
else:
with open(filename, "wt") as f:
write_header(f, '// ')
with open(cmake, "at") as f:
if add_test:
f.write("add_hackerrank({} {}.{})\n".format(self.key, self.key, lang[:3]))
else:
f.write("add_executable({} {}.{})\n".format(self.key, self.key, lang[:3]))
elif lang == "python3" or lang == "python":
with open(filename, "wt") as f:
write_header(f, '# ')
with open(cmake, "at") as f:
if add_test:
f.write("add_hackerrank_py({}.py)\n".format(self.key))
else:
pass
elif lang == "haskell":
with open(filename, "wt") as f:
write_header(f, '-- ')
with open(cmake, "at") as f:
f.write("#add_hackerrank_hs({}.hs)\n".format(self.key))
elif lang == "bash":
with open(filename, "wt") as f:
write_header(f, '# ')
with open(cmake, "at") as f:
f.write("add_hackerrank_shell({}.sh)\n".format(self.key))
elif lang == "java" or lang == "java8":
with open(filename, "wt") as f:
write_header(f, '// ')
with open(cmake, "at") as f:
f.write("add_hackerrank_java({}.java)\n".format(self.key))
elif lang == "javascript":
with open(filename, "wt") as f:
write_header(f, '// ')
with open(cmake, "at") as f:
f.write("add_hackerrank_js({}.js)\n".format(self.key))
# languages without a test driver
elif lang == "text" or lang == "perl":
with open(filename, "wt") as f:
write_header(f, '# ')
elif lang == "oracle" or lang == "lua":
with open(filename, "wt") as f:
write_header(f, '-- ')
else:
print("Unknown language:", lang)
return
filename = os.path.relpath(filename)
print("File created. Use « code {} » to edit it.".format(filename))
with open(os.path.join(self.rootdir, "history.md"), "at") as f:
f.write("{}|{}|{}|{}|[solution]({}) [web]({})\n".format(
self.path, self.key, lang, time.strftime("%c %z"),
os.path.join(self.path, self.key + "." + extension),
self.url))
if editor:
if 'VSCODE_PID' in os.environ:
if platform.system() == 'Windows':
subprocess.check_call(["code.cmd", filename])
else:
subprocess.check_call(["code", filename])
def download(self,
dest_dir="testcases",
url="download_testcases",
suffix="-testcases.zip",
content_type="application/zip",
overwrite=False):
""" download test cases and problem statement """
def my_parsedate(text):
return datetime.datetime(*email.utils.parsedate(text)[:6])
testcases_dir = os.path.join(self.rootdir, dest_dir, self.contest)
os.makedirs(testcases_dir, exist_ok=True)
testcase_file = os.path.join(testcases_dir, self.key + suffix)
testcase_err = os.path.splitext(testcase_file)[0] + ".err"
if overwrite or (not os.path.exists(testcase_file) and not os.path.exists(testcase_err)): # noqa
offline = os.path.join(os.path.dirname(__file__),
"offline", dest_dir, self.contest,
self.key + suffix)
if not overwrite and os.path.exists(offline):
print("link", os.path.relpath(offline), os.path.relpath(testcase_file))
os.link(offline, testcase_file)
pass
else:
url = "https://www.hackerrank.com/rest/contests/{}/challenges/{}/{}".format(self.contest, self.key, url) # noqa
# download resource (statement or testcases: no cache)
with requests_cache.disabled():
r = requests.get(url, allow_redirects=True)
if r.status_code == 200:
if r.headers['content-type'] == content_type:
with open(testcase_file, "wb") as f:
f.write(r.content)
if r.headers.get('last-modified'):
d = my_parsedate(r.headers['last-modified'])
ts = d.timestamp()
os.utime(testcase_file, (ts, ts))
print("Download {}: {} bytes".format(dest_dir, len(r.content)))
else:
print("{}: download error {} {} {}".format(dest_dir, self.key, r.status_code, r.text))
if r.status_code == 404:
with open(testcase_err, "w"):
pass
testcase_file = None
return testcase_file
def downloads(self, overwrite=False, testcases=True, statement=False):
testcases_file, statement_file = None, None
if testcases:
testcases_file = self.download(overwrite=overwrite)
if statement:
statement_file = self.download(dest_dir="statements",
url="download_pdf?language=English",
suffix=".pdf",
content_type="application/pdf",
overwrite=overwrite)
return testcases_file, statement_file
def set_logging(verbose):
""" set up a colorized logger """
if sys.stdout.isatty():
logging.addLevelName(logging.DEBUG, "\033[0;32m%s\033[0m" % logging.getLevelName(logging.DEBUG))
logging.addLevelName(logging.INFO, "\033[1;33m%s\033[0m" % logging.getLevelName(logging.INFO))
logging.addLevelName(logging.WARNING, "\033[1;35m%s\033[1;0m" % logging.getLevelName(logging.WARNING))
logging.addLevelName(logging.ERROR, "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.ERROR))
if verbose:
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.DEBUG, datefmt='%H:%M:%S')
else:
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.ERROR, datefmt='%H:%M:%S')
def set_cache(refresh=False):
""" install the static Requests cache """
if refresh:
expire_after = datetime.timedelta(seconds=0)
else:
expire_after = datetime.timedelta(days=30)
requests_cache.install_cache(
cache_name=os.path.join(os.path.dirname(__file__), "cache"),
allowable_methods=('GET', 'POST'), expire_after=expire_after)
requests_cache.core.remove_expired_responses()
def main():
# grabbed from Javascript function we_are_hiring() in the www.hackerrank.com pages
lines = [
Colors.GREEN,
"===============================================================================",
",--. ,--. ,--. ,------. ,--. ",
"| '--' | ,--,--. ,---.| |,-. ,---. ,--.--.| .--. ' ,--,--.,--,--, | |,-. ",
"| .--. |' ,-. || .--'| /| .-. :| .--'| '--'.'' ,-. || \\| / ",
"| | | |\\ '-' |\\ `--.| \\ \\\\ --.| | | |\\ \\ \\ '-' || || || \\ \\ ",
"`--' `--' `--`--' `---'`--'`--'`----'`--' `--' '--' `--`--'`--''--'`--'`--' ",
"===============================================================================",
Colors.END,
]
for i in lines:
print(i)
parser = argparse.ArgumentParser(
description='Initialize a ' + Colors.LIGHT_BLUE + 'HackerRank' + Colors.END + ' challenge.')
parser.add_argument('url', help="Challenge URL")
parser.add_argument('-v', '--verbose', help="Verbose mode", action='store_true')
parser.add_argument('-d', '--debug', help="Debug mode", action='store_true')
parser.add_argument('-f', '--force', help="Force overwrite", action='store_true')
parser.add_argument('-X', dest="force_cpp", help="Force C++", action='store_true')
parser.add_argument('-H', dest="force_hpp", help="Force C++ with include", action='store_true')
parser.add_argument('-C', dest="force_c", help="Force C", action='store_true')
parser.add_argument('-l', dest="lang", metavar="LANG", help="Language selection", default="*")
parser.add_argument('-R', "--refresh", help="force refresh cache", action='store_true')
parser.add_argument("--no-cache", help="disable Requests cache", action='store_true')
args = parser.parse_args()
set_logging(args.verbose)
if not args.no_cache:
set_cache(args.refresh)
if args.force_cpp or args.force_hpp:
args.lang = "cpp14"
if args.force_c:
args.lang = "c"
alt_path = None
alt_path_name = None
alt_url = None
data = ""
if args.url.startswith('http'):
# challenge linked from the interview preparation kit ?
t = re.search(r"www\.hackerrank\.com/challenges/([a-z\-\d]+)/problem\?h_l=playlist&slugs%5B%5D=interview&slugs%5B%5D=([a-z\-\d]+)&slugs%5B%5D=([a-z\-\d]+)", args.url) # noqa
if t:
contest = "master"
challenge = t.group(1)
alt_path = os.path.join(t.group(2), t.group(3))
# retrieve the name of the interview section
url = "https://www.hackerrank.com/rest/playlists/" + t.group(2)
r = requests.get(url)
if r.status_code == 200:
data = json.loads(r.content)
name1 = data['name']
name2 = [i['name'] for i in data['playlists'] if i['slug'] == t.group(3)][0]
alt_path_name = "{} > {}".format(name1, name2)
else:
alt_path_name = alt_path
alt_url = "https://www.hackerrank.com/challenges/{}/problem?h_l=playlist&slugs%5B%5D%5B%5D=interview&slugs%5B%5D%5B%5D={}&slugs%5B%5D%5B%5D={}".format(t.group(1), t.group(2), t.group(3)) # noqa
else:
# practice challenge ?
t = re.search(r"www\.hackerrank\.com/challenges/([^/]+)", args.url)
if t:
contest = "master"
challenge = t.group(1)
else:
# contest challenge ?
t = re.search(r"www\.hackerrank\.com/contests/([^/]+)/challenges/([\w\d\-]+)", args.url) # noqa
if t:
contest = t.group(1)
challenge = t.group(2)
# REST api to get challenge model
url = "https://www.hackerrank.com/rest/contests/{}/challenges/{}".format(contest, challenge)
r = requests.get(url)
if r.status_code == 200:
data = r.text
elif args.url.find(':') != -1:
contest, _, challenge = args.url.partition(':')
url = "https://www.hackerrank.com/rest/contests/{}/challenges/{}".format(contest, challenge)
print('URL', contest, challenge, url)
r = requests.get(url)
if r.status_code == 200:
data = r.text
else:
with open(args.url) as f:
data = f.read()
parser = HackerRankParser(args.debug)
parser.feed(data, True)
# trick to handle challenges that exist only in the "interview-preparation-kit"
if alt_path:
parser.path = alt_path
parser.path_name = alt_path_name
parser.url = alt_url
parser.info()
parser.gen_stub(args.lang, args.force, args.force_hpp)
parser.downloads(args.force)
if __name__ == '__main__':
main()
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
#! /usr/bin/env python3
# download the contests, domains and challenges from HackerRank
# build a local mirror that allows offline solving as well as the creation of the index tables
import json
import os
import requests
from hrinit import HackerRankParser
import argparse
import requests_cache
import sys
import logging
import datetime
def get_path(m):
# the model of a contest challenge has:
# - contest (e.g. projecteuler)
# - slug (e.g. euler001)
# and is stored under the tree <contest>/<slug>.<extension>
# the model of a practice challenge has:
# - contest: master
# - slug: the key
# - track: domain/subdomain (e.g. algorithms/warmup)
# and sometimes:
# - primary_contest: the contest it came from
if 'track' not in m or m['track'] is None:
path = os.path.join("models", m["contest_slug"])
else:
path = os.path.join("models",
m["contest_slug"], m["track"]["track_slug"], m["track"]["slug"])
return path
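# e.g. (illustrative) a practice challenge on the algorithms/warmup track is stored
# under models/master/algorithms/warmup/, while a projecteuler contest challenge
# goes under models/projecteuler/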
class hackerrank:
def __init__(self, download_challenges=False, reload_catalogs=False):
self.session = requests.Session()
self.download_challenges = download_challenges
self.reload_catalogs = reload_catalogs
self.copy_testcases = False
def set_copy_testcases(self):
self.copy_testcases = True
self.download_challenges = True
#####
def retrieve(self, contest, challenge, filename):
if os.path.exists(filename):
with open(filename, "rb") as f:
# print("----> ", challenge, filename)
return f.read()
url = "https://www.hackerrank.com/rest/contests/{}/challenges/{}".format(contest, challenge)
with requests_cache.disabled():
r = self.session.get(url)
if r.status_code == 200:
with open(filename, "wb") as f:
f.write(r.content)
return r.content
######
def mirror(self, models):
""" mirror challenges listed into models """
for m in models:
# kind: code database
# if m.get('kind') != 'code':
# print(m.get('kind'))
# continue
# if 'kind' is missing: it is a video (cf. cracking the coding interview)
if 'kind' not in m:
print("missing kind", m)
continue
m["contest_slug"] # master
m["slug"] # solve-me-first
m["name"] # Solve Me First
m["preview"] # This is an easy challenge ...
if 'track' in m:
if m['track']:
m["track"]["track_slug"] # algorithms
m["track"]["track_name"] # Algorithms
m["track"]["slug"] # warmup
m["track"]["name"] # Warmup
if 'track' in m and m["track"]:
print("=======> ", m["track"]["track_name"], ">", m["track"]["name"], ">", m["name"])
else:
print("=======> ", m["name"])
if not self.download_challenges:
continue
path = get_path(m)
os.makedirs(path, exist_ok=True)
data = self.retrieve(m["contest_slug"], m["slug"], os.path.join(path, m["slug"] + ".json"))
if data is None:
print("NOT AVAILABLE", m["slug"], m["contest_slug"])
continue
hr = HackerRankParser(rootdir=".")
hr.feed(data, True)
testcases_file, _ = hr.downloads(statement=True, testcases=True)
if self.copy_testcases:
dest = os.path.join(os.path.dirname(__file__), testcases_file)
if os.path.exists(testcases_file) and not os.path.exists(dest):
print("link", dest)
os.link(testcases_file, dest)
def get(self, url, unused):
print(">", url)
return self.session.get(url).json()
# def get(self, url, cache_file):
# cache_file = os.path.join('cache', cache_file)
# os.makedirs(os.path.dirname(cache_file), exist_ok=True)
# if not self.reload_catalogs and os.path.exists(cache_file):
# with open(cache_file, "rb") as f:
# return json.loads(f.read())
# else:
# print(">", url)
# r = self.session.get(url)
# if r.status_code == 200:
# with open(cache_file, "wb") as f:
# f.write(r.content)
# return json.loads(r.content)
# else:
# print("error", r, url)
def get_tracks(self, contest, my_caterogies=None):
url = 'https://www.hackerrank.com/rest/contests/{}/tracks'.format(contest)
fn = '{}_tracks.json'.format(contest)
tracks = self.get(url, fn)
for track in tracks['models']:
if my_caterogies is not None and track['slug'] not in my_caterogies:
continue
url = 'https://www.hackerrank.com/rest/contests/{}/tracks/{}/chapters'.format(contest, track['slug']) # noqa
fn = '{}_{}.json'.format(contest, track['slug'])
data = self.get(url, fn)
chapters = data['models']
track = {"models": [],
"id": track['id'],
"slug": track['slug'],
"name": track['name'],
"description": track['descriptions']}
for chapter in chapters:
data = self.get_chapter(contest, track['slug'], chapter)
track["models"].extend(data)
os.makedirs("contests", exist_ok=True)
filename = os.path.join("contests", "{}_{}.json".format(contest, track['slug']))
with open(filename, "wt") as f:
json.dump(track, f)
self.mirror(track["models"])
def get_chapter(self, contest, track, chapter):
slug = chapter['slug'] # warmup
# name = chapter['name'] # Warmup
count = int(chapter['challenges_count']) # 10
models = []
limit = 50
for i in range(0, count, limit):
url = "https://www.hackerrank.com/rest/contests/{}/categories/{}%7C{}/challenges?offset={}&limit={}".format(contest, track, slug, i, limit) # noqa
fn = '{}_{}_{}_{:03d}.json'.format(contest, track, slug, i)
data = self.get(url, fn)
models.extend(data['models'])
return models
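# e.g. a chapter advertising 120 challenges is fetched with 3 paginated REST calls
# (offsets 0, 50 and 100, limit 50)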
def get_contest(self, contest):
url = 'https://www.hackerrank.com/rest/contests/' + contest
fn = contest + '.json'
data = self.get(url, fn)
model = data['model']
count = model['challenges_count'] # 210
if count is None:
return
models = []
limit = 50
for offset in range(0, count, limit):
url = "https://www.hackerrank.com/rest/contests/{}/challenges?offset={}&limit={}".format(contest, offset, limit) # noqa
fn = '{}_{:03d}.json'.format(contest, offset)
data = self.get(url, fn)
models.extend(data['models'])
track = {"models": models,
"id": model['id'],
"slug": model['slug'],
"name": model['name'],
"description": model['description']}
os.makedirs("contests", exist_ok=True)
with open(os.path.join("contests", "{}.json".format(contest)), "wt") as f:
json.dump(track, f)
print("====>", contest)
self.mirror(models)
def all_contests(self):
archived = self.get("https://www.hackerrank.com/rest/contests/archived?offset=0&limit=500&contest_slug=active", "contests_archived.json") # noqa
upcoming = self.get("https://www.hackerrank.com/rest/contests/upcoming", "contests_upcoming.json") # noqa
contests = set()
for c in upcoming['models']:
slug = c['slug']
contests.add(slug)
self.get_contest(slug)
for c in archived['models']:
slug = c['slug']
if slug not in contests:
contests.add(slug)
self.get_contest(slug)
def all_tracks(self):
tracks = self.get("https://www.hackerrank.com/rest/contests/master/tracks", "master_tracks.json") # noqa
t = list(t['slug'] for t in tracks['models'])
self.get_tracks("master", t)
def tutorial(self, filename):
with open(filename, "rt") as f:
data = json.load(f)
fn = None
for m in data['models']:
if 'contest_slug' in m and 'track' in m:
fn = os.path.join("contests", '{}_tutorials_{}.json'.format(m['contest_slug'], m['track']['slug'])) # noqa
break
if fn is None:
print("Bad file:", filename)
if os.path.exists(fn):
os.unlink(fn)
os.link(filename, fn)
self.mirror(data['models'])
def interview(self):
models = []
stack = ['interview-preparation-kit']
while len(stack) > 0:
s = stack.pop()
data = self.get('https://www.hackerrank.com/rest/playlists/{}'.format(s), 'playlist_{}.json'.format(s)) # noqa
for playlist in data['playlists']:
stack.append(playlist['slug'])
if data['challenges_count'] > 0:
d = self.get('https://www.hackerrank.com/rest/playlists/{}/challenges'.format(s), 'playlist_{}_challenges.json'.format(s)) # noqa
print("Interview:", d['name'])
self.mirror(d['challenges'])
x = d['challenges']
x.extend(models)
models = x
track = {"models": models,
"id": 0,
"slug": 'interview-preparation-kit',
"name": 'interview-preparation-kit',
"description": 'interview-preparation-kit'}
filename = os.path.join("contests", "interview-preparation-kit.json")
with open(filename, "wt") as f:
json.dump(track, f)
def set_logging(verbose):
""" set up a colorized logger """
if sys.stdout.isatty():
logging.addLevelName(logging.DEBUG, "\033[0;32m%s\033[0m" % logging.getLevelName(logging.DEBUG))
logging.addLevelName(logging.INFO, "\033[1;33m%s\033[0m" % logging.getLevelName(logging.INFO))
logging.addLevelName(logging.WARNING, "\033[1;35m%s\033[1;0m" % logging.getLevelName(logging.WARNING))
logging.addLevelName(logging.ERROR, "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.ERROR))
if verbose:
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.DEBUG, datefmt='%H:%M:%S')
else:
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.ERROR, datefmt='%H:%M:%S')
def set_cache(refresh=False):
""" install the static Requests cache """
if refresh:
expire_after = datetime.timedelta(seconds=0)
else:
expire_after = datetime.timedelta(days=30)
requests_cache.install_cache(
cache_name=os.path.join(os.path.dirname(__file__), "cache"),
allowable_methods=('GET', 'POST'), expire_after=expire_after)
requests_cache.core.remove_expired_responses()
def offline():
parser = argparse.ArgumentParser(description='Offliner')
parser.add_argument("-v", "--verbose", help="increase verbosity", action='store_true')
parser.add_argument('--contests', help="download all contests", action='store_true')
parser.add_argument('--tracks', help="download all tracks", action='store_true')
parser.add_argument('-c', '--contest', help="download contest")
parser.add_argument('-t', '--track', nargs='*', help="download a master track")
parser.add_argument('-m', '--mirror', help="mirror challenges", action='store_true')
parser.add_argument('-R', '--refresh', help="refresh the catalogs (do not use cache)",
action='store_true')
parser.add_argument('--tutorials', help="download tutorials", action='store_true')
parser.add_argument('--interview', help="download interview-preparation-kit",
action='store_true')
parser.add_argument('--copy-testcases', help="copy testcases for archive", action='store_true')
args = parser.parse_args()
set_logging(args.verbose)
set_cache(args.refresh)
x = hackerrank(args.mirror, args.refresh)
if args.copy_testcases:
x.set_copy_testcases()
if args.contests:
x.all_contests()
if args.tracks:
x.all_tracks()
if args.contest:
x.get_contest(args.contest)
if args.track:
x.get_tracks("master", args.track)
if args.tutorials:
x.tutorial("tutorials/cracking-the-coding-interview.json")
x.tutorial("tutorials/30-days-of-code.json")
x.tutorial("tutorials/10-days-of-javascript.json")
x.tutorial("tutorials/10-days-of-statistics.json")
if args.interview:
x.interview()
if False:
x.get_tracks("master", ["algorithms", "data-structures", "mathematics",
"cpp", "python", "shell", "sql", "security",
"fp"])
x.get_contest("projecteuler")
x.get_tracks("master", ["databases", "general-programming", "ai", "regex"])
# x.get_contest("infinitum10")
# x.get_contest("infinitum18")
# x.get_contest("openbracket-2017")
if __name__ == '__main__':
__file__ = os.path.abspath(__file__)
os.chdir(os.path.join(os.path.dirname(__file__), "offline"))
offline()
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
pyyaml
requests
requests_cache
flake8
numpy
scipy
scikit-learn
#cpp-coveralls
#coveralls
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Compare a HackerRank testcase result fairly with decimal numbers,
taking into account up to DECIMALS digits (and ignoring the following).
"""
from __future__ import print_function
import sys
import re
import itertools
DECIMALS = 6
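# Worked example of the relative tolerance used below: 3.14159265 vs 3.14159199
# gives |a-b|/|a+b| ~= 1.05e-07 <= 1e-06, so the two values compare as equal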
def main():
if len(sys.argv) != 3:
print("Usage: compare.py file1 file2")
sys.exit(2)
try:
f = open(sys.argv[1], "r")
g = open(sys.argv[2], "r")
float_pattern = re.compile(r'(\d+\.\d+)')
def float_fmt(floats):
def fmt(m):
floats.append(float(m.group(0)))
return "FLOAT"
return fmt
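# e.g. float_pattern.sub(float_fmt(lst), "ans 1.50") returns "ans FLOAT"
# and appends 1.5 to lst (illustrative input, not taken from a real testcase)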
n = 0
for i, j in itertools.zip_longest(f, g, fillvalue=''):
n += 1
# ignore line endings
i = i.rstrip()
j = j.rstrip()
if len(i) > 500 or len(j) > 500:
# line is too long, do not test floats
# ... hard to find the ideal comparison that works everywhere ...
floats_are_equal = True
i_new = i
j_new = j
else:
# when a float number is found, adjust decimal digits
i_floats, j_floats = [], []
i_new = float_pattern.sub(float_fmt(i_floats), i)
j_new = float_pattern.sub(float_fmt(j_floats), j)
floats_are_equal = len(i_floats) == len(j_floats)
if floats_are_equal:
for a, b in zip(i_floats, j_floats):
if a == b:
continue
if abs(a - b) / abs(a + b) > 10 ** -DECIMALS:
floats_are_equal = False
break
if i_new != j_new or not floats_are_equal:
# a difference is found
print('{}< {}'.format(n, i))
print('{}> {}'.format(n, j))
sys.exit(1)
# everything's fine!
sys.exit(0)
except Exception as f:
print(f, file=sys.stderr)
sys.exit(2)
if __name__ == '__main__':
main()
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
#! /usr/bin/env python3
# (re)build the README.md files that describe the challenges
import json
import glob
import os
import io
from collections import namedtuple
import yaml
# tuple
Slug = namedtuple('Slug', ['order', # number used to preserve ordering
'link', # markdown link to the local file
'track', # markdown link to the subsection of the HackerRank site
'domain', # markdown link to the domain on www.hackerrank.com
'main_track', # domain identifier (used to look up the description)
'url']) # url to the hackerrank website
# globals
models = {} # challenges indexed by (contest, slug)
descriptions = {}
playlists = {}
def get_models():
""" charge les définitions des challenges """
# the playlists
for i in glob.iglob(os.path.join("offline", "playlists", "*.json")):
with open(i, "r") as f:
data = json.load(f)
playlists[data['slug']] = data
# les contests (y compris master_<domain>)
order = 0
for i in glob.iglob(os.path.join("offline", "contests", "*.json")):
with open(i, "r") as f:
data = json.load(f)
# the description of a contest
if 'name' in data:
desc = (data['description'] or '').partition('<br')[0]
descriptions[data['slug']] = {'name': data['name'],
'description': desc}
# for every challenge in a contest
for m in data['models']:
if 'contest_slug' not in m:
continue
order += 1
m['order'] = order # add a number to preserve chapter ordering
if m['contest_slug'] == 'projecteuler':
m['order'] -= 10000 # put ProjectEuler+ at the head of the contests
models[(m['contest_slug'], m['slug'])] = m
def do_domain(domain):
slugs = {}
#
# STEP 1: scan a directory (recursively) looking for challenges
#
for i in glob.iglob(os.path.join(domain, "**/*"), recursive=True):
# no elegant way found yet to exclude the solution directories
if "/js10-create-a-button/" in i or "/js10-buttons-container/" in i or '/js10-binary-calculator/' in i: # noqa
continue
if os.path.isdir(i):
# also create the README.md files in each subdomain
do_domain(i)
if not os.path.isfile(i):
continue
name = os.path.basename(i)
if name == 'CMakeLists.txt':
continue
if name == 'README.md':
continue
contest_challenge, lang = os.path.splitext(name)
langs = {'.hs': 'Haskell',
'.erl': 'Erlang',
'.py': 'Python',
'.c': 'C',
'.cpp': 'C++',
'.sh': 'bash',
'.sql': 'SQL',
'.txt': 'text',
'.java': 'Java',
'.js': 'Javascript',
'.html': 'HTML',
'.pl': 'Perl'}
lang = langs.get(lang)
if not lang:
            # normal case: file to be ignored
# print("LANG NOT FOUND:", name, os.path.splitext(name))
continue
        contest = 'master'  # by default
zz = os.path.split(os.path.dirname(i))
if zz[0] == "contests":
contest = zz[1]
if (contest, contest_challenge) not in models:
print("SLUG NOT FOUND:", name, contest_challenge, lang, i, domain)
continue
source = os.path.relpath(os.path.realpath(i), start=domain)
if os.path.islink(source):
print(source)
exit()
r = slugs.get((contest, contest_challenge))
if r is None:
m = models[(contest, contest_challenge)]
m['onboarding'] = None
if contest != "master":
url = 'https://www.hackerrank.com/contests/{}/challenges/{}'.format(contest, contest_challenge) # noqa
else:
url = 'https://www.hackerrank.com/challenges/{}'.format(contest_challenge)
if zz[0] == "interview-preparation-kit":
# print("--->", zz)
title = "title"
track = "track"
main_track = "main_track"
if zz[0] in playlists:
playlist = playlists[zz[0]]
chapter = None
for i, c in enumerate(playlist['playlists']):
if c['slug'] == zz[1]:
chapter = c
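                            # large offset: list the Interview Preparation Kit chapters after everything else, keeping the playlist order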
m['order'] = i + 100000000
break
title = "[{}]({})".format(
playlist['name'],
"https://www.hackerrank.com/interview/{}".format(zz[0]))
track = "[{}]({})".format(
chapter['name'],
"https://www.hackerrank.com/interview/{}/{}/challenges".format(zz[0], zz[1])) # noqa
url = "https://www.hackerrank.com/challenges/{}/problem?h_l=playlist&slugs%5B%5D=interview&slugs%5B%5D={}&slugs%5B%5D={}".format( # noqa
contest_challenge,
zz[0],
zz[1])
elif m['track'] is not None:
title = "[{}]({})".format(
m['track']['track_name'],
"https://www.hackerrank.com/domains/" + m['track']['track_slug'])
track = "[{}]({}) > [{}]({})".format(
m['track']['track_name'],
"https://www.hackerrank.com/domains/" + m['track']['track_slug'],
m['track']['name'],
"https://www.hackerrank.com/domains/" +
m['track']['track_slug'] + "/" + m['track']['slug'])
track = "[{}]({})".format(
m['track']['name'],
"https://www.hackerrank.com/domains/" +
m['track']['track_slug'] + "/" + m['track']['slug'])
main_track = m['track']['track_slug']
else:
x = descriptions.get(m['contest_slug'])['name']
title = "[{}]({})".format(x, "https://www.hackerrank.com/contests/" +
m['contest_slug'])
track = ""
main_track = m['contest_slug']
r = Slug(order=m['order'],
link=['[{}]({})'.format(lang, source)],
domain=title,
main_track=main_track,
track=track,
url=url)
slugs[(contest, contest_challenge)] = r
else:
r.link.append('[{}]({})'.format(lang, source))
order = [(v.order, contest_challenge) for contest_challenge, v in slugs.items()]
order.sort()
#
    # STEP 2: build the challenge index, preserving the order
#
with io.StringIO() as out:
if os.path.exists(os.path.join(domain, "README.md.in")):
with open(os.path.join(domain, "README.md.in")) as f:
out.write(f.read())
prev_contest = None
prev_domain = None
prev_track = None
for _, contest_challenge in order:
m = models[contest_challenge]
s = slugs[contest_challenge]
if prev_domain != s.domain:
prev_domain = s.domain
print("", file=out)
print("### " + prev_domain, file=out)
if s.main_track in descriptions:
print(descriptions[s.main_track]['description'], file=out)
print("", file=out)
if prev_track != s.track or prev_contest != contest_challenge[0]:
prev_contest = contest_challenge[0]
prev_track = s.track
if prev_track != "":
print("", file=out)
print("#### " + prev_track, file=out)
print("", file=out)
print("Name | Preview | Code | Difficulty", file=out)
print("---- | ------- | ---- | ----------", file=out)
links = ' '.join(sorted(s.link))
preview = m['preview']
if not preview:
preview = m['name']
preview = preview.replace("\n", " ").strip()
print('[%s](%s)|%s|%s|%s' % (m['name'], s.url, preview, links,
m['difficulty_name']), file=out)
print("", file=out)
md = out.getvalue()
#
    # STEP 3: update the README.md file
#
fn = os.path.join(domain, "README.md")
if len(md.strip()) == 0:
if os.path.exists(fn):
print("delete", fn)
os.unlink(fn)
elif not os.path.exists(fn) or md != open(fn, "rt").read():
print("rewrite", fn)
open(fn, "wt").write(md)
def main():
    domains = yaml.safe_load(open(os.path.join(os.path.dirname(__file__), ".hr_conf.yaml")))["domains"]
os.chdir(os.path.dirname(__file__))
get_models()
for domain in domains:
do_domain(domain)
do_domain("coding-dojo")
if __name__ == '__main__':
main()
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
# [](https://www.hackerrank.com) HackerRank
[](https://travis-ci.org/rene-d/hackerrank) [](https://www.hackerrank.com/rene_d?hr_r=1)
[HackerRank](https://www.hackerrank.com/dashboard) is a great place to learn, improve, and play with your programming skills.
All challenges are runnable, except the SQL and multiple-choice ones. They are verified with their default testcase (usually Testcase 0).
## Solutions
[](cpp/)
<a href="c/"><img src="https://hrcdn.net/hackerrank/assets/dashboard/c-43bbd380e51d62b83c4b542c58699a97.svg" width="50px" height="50px"></a>
[](python/)
[](shell/)
[](java/)
[](algorithms/)
[](data-structures/)
[](mathematics/)
[](tutorials/30-days-of-code/)
[](tutorials/cracking-the-coding-interview/)
[](tutorials/10-days-of-statistics/)
[](tutorials/10-days-of-javascript/)
[](regex/)
[](security/)
[](databases/)
[](sql/)
The curated challenges of the [Interview Preparation Kit](interview-preparation-kit/).
And [ProjectEuler+](contests/projecteuler/) (see my solutions of [Project Euler](https://projecteuler.net/) [here](https://github.com/rene-d/math/tree/master/projecteuler)).
## Usage and tools
### Requirements
- [Python 3.6 or 3.7](https://www.python.org) and some packages: [numpy](http://www.numpy.org), [SciPy](https://www.scipy.org), [scikit-learn](http://scikit-learn.org/), [requests](http://html.python-requests.org), [flake8](http://flake8.readthedocs.io/), [PyYAML](https://pyyaml.org)
- [CMake](https://cmake.org) to build and run tests
- A modern [GCC](https://gcc.gnu.org) or [Clang](https://clang.llvm.org), as shipped with macOS or Linux. For Windows, you can use [WSL](https://docs.microsoft.com/en-us/windows/wsl/install-win10), [Cygwin](https://www.cygwin.com) or [Visual Studio Community 2017](https://www.visualstudio.com/downloads/)
- [Haskell](https://www.haskell.org) (functional programming only)
- [Java](http://www.oracle.com/technetwork/java/javase/index.html) (some challenges and Java practice)
- [Node.js](https://nodejs.org/)
### Compilation
mkdir build
cd build
cmake ..
make
### Tests
cd build
make extract-testcases
ctest [-R filter]
It will download the challenge testcases and run the solution programs.
A single solution can be tested on its own with `runtest.sh -t challenge-name [-n test-number]`, run from its build subdirectory.
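For example, to check a single challenge (a sketch; it assumes that `.vscode/bashrc` has put the repository root, and therefore `runtest.sh`, on the `PATH`):

    cd build
    ctest -R solve-me-first          # run only the tests matching the filter
    cd algorithms/warmup             # the build tree mirrors the source tree
    runtest.sh -t solve-me-first     # re-run one solution against its testcases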
### Tools
- `hrinit.py` creates a new file for a given challenge based on the HackerRank template. Default choice for language is [Python 3](https://wiki.python.org/moin/Python2orPython3).
- `runtest.sh` is the script used by [CTest](https://cmake.org/Wiki/CMake/Testing_With_CTest) to verify the solution.
- `compare.py` fairly compares the program output with the expected one (see the example after this list). It is necessary since some challenges use decimal numbers: we cannot simply use `diff -qw`.
- `hrtc2.py` can be used to download "purchased" testcases or to create new ones. Copy and paste the download links of the input and output data.
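For instance, `compare.py` takes the expected output and the actual output as its two arguments and exits with 0 when they match (decimal numbers only need to agree to about 6 significant digits), 1 when they differ and 2 on error. A sketch, assuming the usual `input/` / `output/` testcase layout, a build directory of the `solve-me-first` challenge and `compare.py` on the `PATH`:

    ./solve-me-first < tests/solve-me-first/input/input00.txt > output00.txt
    compare.py tests/solve-me-first/output/output00.txt output00.txt && echo "same output"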
### Other (hacking) tools
- `hr_github.sh` creates the testcases archive and syncs the private repo to the public GitHub one
- `hr_table.py` automatically creates `README.md` files with challenge lists (requires offline data)
- `hr_count.py` counts challenges and testcases - used to update toplevel `README.md`
- `hr_offline.py` downloads challenge catalogs and more
- `hr_interview.py` gets the playlist of [Interview Preparation Kit](https://www.hackerrank.com/interview/interview-preparation-kit)
- `hr_menu.py` displays a graphical menu to browse challenges and solve them when no Internet connection is available (requires offline data)
- `hr_db.py` creates a SQLite database - used to understand how data is structured (or not!)
### IDE
[Visual Studio Code](https://code.visualstudio.com) is a great free IDE that comes with many [plugins](https://marketplace.visualstudio.com/vscode). Some configuration files are provided, along with a [Bash](https://www.gnu.org/software/bash/) initialization file.
### Other online resources
* [stack overflow](https://stackoverflow.com) and [Mathematics Stack Exchange](https://math.stackexchange.com)
* [GeeksforGeeks](https://www.geeksforgeeks.org) Computer Science portal and resources
* [Rosetta Code](http://rosettacode.org/wiki/Rosetta_Code)
* [tutorialspoint](https://www.tutorialspoint.com/)
* [Compiler Explorer](https://godbolt.org) Run compilers interactively from your web browser and interact with the assembly ([opensource](https://github.com/mattgodbolt/compiler-explorer)).
* [Ideone](https://ideone.com) Online compiler and debugging tool which allows you to compile source code and execute it online in more than 60 programming languages.
* and many, many more...
## License
[Unlicense](http://unlicense.org) aka. Public Domain 🤘
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
add_subdirectory(concepts)
add_subdirectory(cryptography)
add_subdirectory(functions)
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
### [Security](https://www.hackerrank.com/domains/security)
Practice and study of secure communications
#### [Functions](https://www.hackerrank.com/domains/security/functions)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Security Functions](https://www.hackerrank.com/challenges/security-tutorial-functions)|Complete a function that takes input x and return the remainder of x divided by 11.|[C++](functions/security-tutorial-functions.cpp)|Easy
[Security Functions II](https://www.hackerrank.com/challenges/security-function-ii)|Complete the function that takes x as the input and returns (x*x)|[C++](functions/security-function-ii.cpp)|Easy
[Security Bijective Functions](https://www.hackerrank.com/challenges/security-bijective-functions)|You'll be given an integer n and a function f:X→X where X={1,2,3,...,n}. Determine whether the function is a bijective function or not.|[C++](functions/security-bijective-functions.cpp)|Easy
[Security Function Inverses](https://www.hackerrank.com/challenges/security-inverse-of-a-function)|Find the inverse of a given function f.|[C++](functions/security-inverse-of-a-function.cpp)|Easy
[Security Permutations](https://www.hackerrank.com/challenges/security-tutorial-permutations)|Given a function f, find f(f(x)) for all x ∈ {1,2,3,...,n}.|[C++](functions/security-tutorial-permutations.cpp)|Easy
[Security Involution](https://www.hackerrank.com/challenges/security-involution)|Determine whether function f is an involution or not.|[C++](functions/security-involution.cpp)|Easy
#### [Terminology and Concepts](https://www.hackerrank.com/domains/security/concepts)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Security - Message Space and Ciphertext Space](https://www.hackerrank.com/challenges/security-message-space-and-ciphertext-space)|Given a message, you need to find what message you obtain if you shift each digit in the message string ( 1 to the right and cyclic).|[C++](concepts/security-message-space-and-ciphertext-space.cpp)|Easy
[Security Key Spaces](https://www.hackerrank.com/challenges/security-key-spaces)|Consider a message that consists of decimal digits and a key, e, which operates by shifting each digit by e places. Find the corresponding cipher text.|[C++](concepts/security-key-spaces.cpp)|Easy
[Security Encryption Scheme](https://www.hackerrank.com/challenges/security-encryption-scheme)|Count the number of bijections and the number of keys that produce different encryption functions.|[C++](concepts/security-encryption-scheme.cpp)|Easy
#### [Cryptography](https://www.hackerrank.com/domains/security/cryptography)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[PRNG Sequence Guessing](https://www.hackerrank.com/challenges/prng-sequence-guessing)|Given last ten output values of random.nextInt(), guess the next value to be output by the generator.|[C++](cryptography/prng-sequence-guessing.cpp)|Medium
[Keyword Transposition Cipher](https://www.hackerrank.com/challenges/keyword-transposition-cipher)|Given a piece of cipher text and the keyword used to encipher it, write an algorithm to output the original message .|[Python](cryptography/keyword-transposition-cipher.py)|Easy
[Basic Cryptanalysis](https://www.hackerrank.com/challenges/basic-cryptanalysis)|Given a piece of text encoded with a simple monoalphabetic substitution cipher, use basic cryptanalytic techniques to attempt to recover the original plain text.|[Python](cryptography/basic-cryptanalysis.py)|Hard
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
// Security > Functions > Security Involution
// Determine whether function f is an involution or not.
//
// https://www.hackerrank.com/challenges/security-involution/problem
//
#include <cmath>
#include <cstdio>
#include <vector>
#include <iostream>
#include <algorithm>
using namespace std;
int main()
{
unsigned int n;
vector<unsigned int> f;
cin >> n;
f.resize(n + 1);
for (unsigned int i = 1; i <= n; ++i)
{
cin >> f[i];
}
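    // f is an involution iff f(f(i)) == i for every i in [1, n]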
unsigned int i;
for (i = 1; i <= n; ++i)
{
if (f[f[i]] != i) break;
}
cout << ((i == n+1) ? "YES" : "NO");
return 0;
}
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
add_hackerrank(security-function-ii security-function-ii.cpp)
add_hackerrank(security-tutorial-functions security-tutorial-functions.cpp)
add_hackerrank(security-bijective-functions security-bijective-functions.cpp)
add_hackerrank(security-inverse-of-a-function security-inverse-of-a-function.cpp)
add_hackerrank(security-tutorial-permutations security-tutorial-permutations.cpp)
add_hackerrank(security-involution security-involution.cpp)
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
// Security > Functions > Security Function Inverses
// Find the inverse of a given function f.
//
// https://www.hackerrank.com/challenges/security-inverse-of-a-function/problem
//
#include <cmath>
#include <cstdio>
#include <vector>
#include <iostream>
#include <algorithm>
using namespace std;
int main()
{
int n, x;
vector<int> f;
cin >> n;
f.resize(n);
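    // build the inverse while reading: if f(i) = x, then f^-1(x) = i, stored at f[x - 1]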
for (int i = 1; i <= n; ++i)
{
cin >> x;
f[x - 1] = i;
}
for (auto i : f)
cout << i << endl;
return 0;
}
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
// Security > Functions > Security Permutations
// Given a function f, find f(f(x)) for all x ∈ {1,2,3,...,n}.
//
// https://www.hackerrank.com/challenges/security-tutorial-permutations/problem
//
#include <cmath>
#include <cstdio>
#include <vector>
#include <iostream>
#include <algorithm>
using namespace std;
int main()
{
int n, x;
vector<int> f;
cin >> n;
f.resize(n);
for (int i = 0; i < n; ++i)
{
cin >> x;
f[i] = x;
}
for (int i = 0; i < n; ++i)
{
        // careful with the indexing of f(): values are 1-based, the vector is 0-based
cout << f[f[i] - 1] << endl;
}
return 0;
}
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
// Security > Functions > Security Functions
// Complete a function that takes input x and return the remainder of x divided by 11.
//
// https://www.hackerrank.com/challenges/security-tutorial-functions/problem
//
#include <bits/stdc++.h>
using namespace std;
int calculate(int x) {
// Complete this function
return x % 11;
}
int main() {
int x;
cin >> x;
int result = calculate(x);
cout << result << endl;
return 0;
}
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
// Security > Functions > Security Bijective Functions
// You'll be given an integer n and a function f:X→X where X={1,2,3,...,n}. Determine whether the function is a bijective function or not.
//
// https://www.hackerrank.com/challenges/security-bijective-functions/problem
//
#include <cmath>
#include <cstdio>
#include <set>
#include <iostream>
#include <algorithm>
using namespace std;
int main()
{
size_t n;
unsigned x;
set<unsigned> f;
cin >> n;
    // for f to be bijective, we need:
    //   every x to be in [1, n]
    //   the whole range [1, n] to be covered => no duplicates
for (size_t i = 0; i < n; ++i)
{
cin >> x;
if (x < 1 || x > n || f.count(x) == 1)
{
break;
}
f.insert(x);
}
if (f.size() == n) cout << "YES"; else cout << "NO";
return 0;
}
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
// Security > Functions > Security Functions II
// Complete the function that takes x as the input and returns (x*x)
//
// https://www.hackerrank.com/challenges/security-function-ii/problem
//
#include <map>
#include <set>
#include <list>
#include <cmath>
#include <ctime>
#include <climits>
#include <deque>
#include <queue>
#include <stack>
#include <bitset>
#include <cstdio>
#include <limits>
#include <vector>
#include <cstdlib>
#include <fstream>
#include <numeric>
#include <sstream>
#include <iostream>
#include <algorithm>
using namespace std;
// (skeliton_head) ----------------------------------------------------------------------
/*
* Complete the function below.
*/
int the_function(int x) {
return x * x;
}
// (skeliton_tail) ----------------------------------------------------------------------
int main() {
int res;
int _x;
cin >> _x;
cin.ignore (std::numeric_limits<std::streamsize>::max(), '\n');
res = the_function(_x);
cout << res << endl;
return 0;
}
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
### [Security](https://www.hackerrank.com/domains/security)
Practice and study of secure communications
#### [Functions](https://www.hackerrank.com/domains/security/functions)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Security Functions](https://www.hackerrank.com/challenges/security-tutorial-functions)|Complete a function that takes input x and return the remainder of x divided by 11.|[C++](security-tutorial-functions.cpp)|Easy
[Security Functions II](https://www.hackerrank.com/challenges/security-function-ii)|Complete the function that takes x as the input and returns (x*x)|[C++](security-function-ii.cpp)|Easy
[Security Bijective Functions](https://www.hackerrank.com/challenges/security-bijective-functions)|You'll be given an integer n and a function f:X→X where X={1,2,3,...,n}. Determine whether the function is a bijective function or not.|[C++](security-bijective-functions.cpp)|Easy
[Security Function Inverses](https://www.hackerrank.com/challenges/security-inverse-of-a-function)|Find the inverse of a given function f.|[C++](security-inverse-of-a-function.cpp)|Easy
[Security Permutations](https://www.hackerrank.com/challenges/security-tutorial-permutations)|Given a function f, find f(f(x)) for all x ∈ {1,2,3,...,n}.|[C++](security-tutorial-permutations.cpp)|Easy
[Security Involution](https://www.hackerrank.com/challenges/security-involution)|Determine whether function f is an involution or not.|[C++](security-involution.cpp)|Easy
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
# Security > Cryptography > Keyword Transposition Cipher
# Given a piece of cipher text and the keyword used to encipher it, write an algorithm to output the original message .
#
# https://www.hackerrank.com/challenges/keyword-transposition-cipher/problem
#
import string
import operator
def get_decoder(key):
    # key2: the key with duplicate characters removed
    # n: length of key2
u = set()
key2 = list()
for c in key:
if c not in u:
u.add(c)
key2.append(c)
n = len(key2)
    # letters: the remaining characters of the alphabet
letters = list(c for c in string.ascii_uppercase if c not in key2)
letters += [' '] * ((n - len(letters) % n) % n)
letters = list(letters[i:i + n] for i in range(0, len(letters), n))
    # add key2 as the first row of letters
letters.insert(0, key2)
    # sort the columns of letters according to the alphabetical order of key2
    # to do so, use an intermediate tuple holding each character's position before sorting
order = list(map(operator.itemgetter(1), sorted((c, i) for i, c in enumerate(key2))))
letters = list((list(a[o] for o in order) for a in letters))
    # transpose the letters matrix and flatten it (skipping the padding)
letters = list(a[i] for i in range(n) for a in letters if a[i] != ' ')
    # decoding table
decoder = dict()
for a, b in zip(string.ascii_uppercase, letters):
decoder[b] = a
return lambda x: decoder.get(x, x)
for _ in range(int(input())):
key = input()
decoder = get_decoder(key)
encoded = input()
print(''.join(list(map(decoder, encoded))))
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
add_hackerrank_py(keyword-transposition-cipher.py)
add_hackerrank(prng-sequence-guessing prng-sequence-guessing.cpp)
add_hackerrank_py(basic-cryptanalysis.py)
configure_file(dictionary.lst dictionary.lst COPYONLY @ONLY)
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
# Security > Cryptography > Basic Cryptanalysis
# Given a piece of text encoded with a simple monoalphabetic substitution cipher, use basic cryptanalytic techniques to attempt to recover the original plain text.
#
# https://www.hackerrank.com/challenges/basic-cryptanalysis/problem
# challenge id: 787
#
# idea: search for the right permutation of letters
# the dictionary
dictionary = {}
for s in open("dictionary.lst"):
s = s.strip()
if len(s) not in dictionary:
dictionary[len(s)] = list()
dictionary[len(s)].append(s.lower())
# the encrypted words
words = [s.strip().lower() for s in input().split()]
# all the candidate permutations
total_perms = []
for crypted in sorted(words, key=lambda s:-len(s)):
perms = []
for word in dictionary[len(crypted)]:
perm = ['.'] * 52
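        # perm is a 52-slot partial mapping: slots 0-25 hold the plaintext letter
        # chosen for each ciphertext letter; slots 26-51 (filled once the whole
        # word matches) hold the ciphertext letter used for each plaintext letter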
for a, b in zip(crypted, word):
a = ord(a) - 97
if perm[a] == '.':
perm[a] = b
else:
if perm[a] != b:
break
else:
            # crypted -> word is a possible permutation
for a, b in zip(crypted, word):
b = ord(b) - 97
perm[b + 26] = a
perms.append(perm)
if len(perms) == 0:
print("pb avec ", crypted)
exit()
total_perms.append(perms)
# sort by increasing number of possibilities
total_perms.sort(key=lambda x: len(x))
# recursive search for the solution: look for letter permutations
# that are mutually compatible
def search(step, keys):
if step == len(total_perms):
return keys.copy()
for perm in total_perms[step]:
for a, b in zip(keys, perm):
if a == '.' or b == '.':
continue
if a != b:
break
else:
keys2 = keys.copy()
for i, b in enumerate(perm):
if b != '.':
keys2[i] = b
ok = search(step + 1, keys2)
if ok:
return ok
return False
# here we go
k = search(0, ['.'] * 52)
# print the result
print(*[''.join(k[ord(c) - 97] for c in word) for word in words])
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
AFAIK
AFAIKs
AI
AIDS
AIDSes
AIs
ANSI
ANSIs
ASCII
ASCIIs
Ada
Adas
Amiga
BASIC
BASICs
BBS
BBSes
BITNET
BITNETs
BLT
BLTs
BSD
BSDs
Borg
Borgs
COBOL
COBOLs
Cs
DDT
DDTs
DEC
DECed
DECing
DECs
DP
DPs
Datamation
Datamations
Dilbert
Dilberts
English
Englishes
Eris
Erises
FAQ
FAQs
FM
FMs
FUD
FUDs
GIGO
Guido
IBM
IMHO
Internet
Internets
Java
Knuth
Knuths
Ks
Linus
Linux
Linuxes
MEGO
MEGOs
MIPS
MIPSes
Mars
Marses
Microsoft
Multics
Multicses
NeWS
NeWSes
OS
OSes
OTOH
PARC
PARCs
PD
PM
PMed
PMing
PMs
Pascal
Pascals
Pentium
Pentiums
Perl
Perls
Python
QWERTY
RFC
RFCs
RTFM
RTFMed
RTFMing
RTFMs
SO
SOS
SOSes
SOs
Sun
Suns
TELNET
TELNETTed
TELNETTing
TELNETs
TeX
TeXes
URL
URLs
Unix
Unixes
Usenet
Usenets
VAX
VAXes
WYSIWYG
Winchester
Winchesters
Xes
YMMV
abbrev
abbrevs
accumulator
accumulators
acolyte
acolytes
admin
admins
alt
alts
amoeba
amoebae
amoebas
app
apps
arena
arenas
asbestos
atomic
avatar
avatars
backgammon
background
backgrounds
bandwidth
bandwidths
bang
bangs
banner
banners
bar
barf
barfed
barfing
barfs
barn
barney
barneys
barns
baroque
bars
batch
baud
bauds
bazaar
bazaars
beam
beamed
beaming
beams
beep
beeps
benchmark
benchmarks
beta
betas
bible
bibles
biff
biffed
biffing
biffs
bigot
bigots
bit
bits
blast
blasted
blasting
blasts
blat
blats
blink
blinked
blinking
blinks
blivet
blivets
block
blocked
blocking
blocks
boa
board
boards
boas
bob
bobs
bogus
boink
boinked
boinking
boinks
bomb
bombed
bombing
bombs
boot
booted
booting
boots
bot
bounce
bounced
bounces
bouncing
boustrophedon
boustrophedons
box
boxen
boxes
break
breaking
breaks
brittle
brittler
brittlest
broke
broken
browser
browsers
bug
bugs
bulletproof
bum
bummed
bumming
bump
bumped
bumping
bumps
bums
burble
burbled
burbles
burbling
buzz
buzzed
buzzes
buzzing
byte
bytes
calculator
calculators
can
canned
canning
canonical
cans
cascade
cascades
cat
catatonic
cathedral
cathedrals
cats
catted
catting
chad
chads
chain
chained
chaining
chains
channel
channels
char
chars
check
checks
cheerfully
chemist
chemists
choke
choked
chokes
choking
chomp
chomped
chomper
chompers
chomping
chomps
chrome
chromes
chug
chugged
chugging
chugs
clean
cleaned
cleaner
cleanest
cleaning
cleans
clobber
clobbered
clobbering
clobbers
clock
clocked
clocking
clocks
clone
cloned
clones
cloning
coaster
coasters
code
codes
compact
compacter
compactest
compo
compos
compress
compressed
compresses
compressing
con
condom
condoms
confuser
confusers
cons
consed
conses
consing
console
consoles
cookbook
cookbooks
cookie
cookies
copper
coppers
core
cores
cowboy
cowboys
cracker
crackers
cracking
crackings
crank
cranked
cranking
cranks
crash
crashed
crashes
crashing
cray
crayola
crayolas
crayon
crayons
crays
creationism
creationisms
creep
creeping
creeps
crept
cretin
cretinous
cretins
crippleware
cripplewares
crock
crocks
crumb
crumbs
crunch
crunched
crunches
crunching
cube
cubes
cubing
cubinged
cubinging
cubings
cyberpunk
cyberpunks
cyberspace
cyberspaces
cycle
cycled
cycles
cycling
daemon
daemons
dd
dded
dding
dds
dead
deader
deadest
deadlock
deadlocks
decay
decays
deckle
deckles
defenestration
defenestrations
delta
deltas
demented
demigod
demigods
demo
demoed
demoing
demon
demons
demos
deprecated
diddle
diddled
diddles
diddling
die
died
dies
diff
diffed
diffing
diffs
digit
digits
dike
diked
dikes
diking
ding
dings
dink
dinker
dinkest
dinosaur
dinosaurs
disclaimer
disclaimers
distribution
distributions
doc
docs
documentation
documentations
dodgier
dodgiest
dodgy
dongle
dongles
donuts
donutses
doorstop
doorstops
down
downed
downing
download
downloaded
downloading
downloads
downs
dragon
dragons
drain
drained
draining
drains
driver
drivers
droid
droids
drone
drones
drugged
drum
drums
dump
dumps
dying
earthquake
earthquakes
echo
echoes
echos
ed
eds
elegant
elephantine
elite
elvish
elvishes
email
emailed
emailing
emails
emoticon
emoticons
empire
empires
engine
engines
enhancement
enhancements
epoch
epochs
epsilon
epsilons
erotics
eroticses
evil
eviler
evilest
eviller
evillest
excl
excls
exec
execked
execking
execs
exploit
exploits
factor
factors
fairings
fairingses
fan
fans
faradize
faradized
faradizes
faradizing
farming
farmings
fascist
faultier
faultiest
faulty
feature
features
fence
fences
filter
filters
fine
finer
finest
finger
fingered
fingering
fingers
firefighting
firefightings
firmware
firmwares
fish
fishes
fix
fixes
flag
flags
flakier
flakiest
flaky
flame
flamed
flamer
flamers
flames
flaming
flap
flapped
flapping
flaps
flat
flatten
flattened
flattening
flattens
flatter
flattest
flavor
flavorful
flavors
flippies
flippy
flood
flooded
flooding
floods
flowchart
flowcharts
flush
flushed
flushes
flushing
flytrap
flytraps
followup
followups
foo
fool
fools
footprint
footprints
fora
foreground
foregrounded
foregrounding
foregrounds
forked
forum
forums
fossil
fossils
frag
fragile
fragiler
fragilest
frags
freeware
freewares
freeze
freezes
freezing
fried
fries
frog
frogging
frogginged
frogginging
froggings
frogs
froze
frozen
fry
frying
fudge
fudged
fudges
fudging
fum
fums
funkier
funkiest
funky
fuzzball
fuzzballs
gag
gagged
gagging
gags
gas
gaseous
gases
gassed
gasses
gassing
gen
generate
generated
generates
generating
gens
gig
gigs
gillion
gillions
glass
glasses
glitch
glitched
glitches
glitching
glob
globed
globing
globs
glue
glues
gnarlier
gnarliest
gnarly
gobble
gobbled
gobbles
gobbling
golden
goldener
goldenest
gonk
gonked
gonking
gonks
gonzo
gopher
gophers
gorp
gorps
gotcha
gotchas
gribble
gribbles
grind
grinding
grinds
grok
grokked
grokking
groks
ground
grovel
groveled
groveling
grovelled
grovelling
grovels
grue
grues
grunge
grunges
gun
gunned
gunning
guns
guru
gurus
hack
hacked
hacker
hackers
hacking
hacks
hair
hairball
hairballs
hairier
hairiest
hairs
hairy
hammer
hammered
hammering
hammers
hamster
hamsters
handle
handles
handshaking
handshakings
hang
hanged
hanging
hangs
happily
hardwired
hat
hats
heartbeat
heartbeats
heavyweight
hex
hexadecimal
hexadecimals
hexes
highly
hing
hings
hirsute
hoarding
hoardings
hobbit
hobbits
hog
hogs
hole
holes
hook
hooks
hop
hopped
hopping
hops
hose
hosed
hoses
hosing
hotlink
hotlinks
huff
huffed
huffing
huffs
hung
hyperspace
hyperspaces
ice
ices
idempotent
inc
incantation
incantations
inced
incing
include
included
includes
including
incs
infinite
infinities
infinity
inflate
inflated
inflates
inflating
interesting
interrupt
interrupts
intro
intros
iron
ironmonger
ironmongers
irons
jaggies
jaggieses
jello
jellos
jiffies
jiffy
jock
jocks
kahuna
kahunas
ken
kens
kick
kicked
kicking
kicks
kit
kits
kludge
kludged
kludges
kludging
kluge
kluged
kluges
kluging
knobs
koan
koans
lag
lags
lamer
lamers
lase
lased
lases
lasing
laundromat
laundromats
leak
leaks
leech
leeches
legal
legalese
legaleses
letterbomb
letterbombs
life
lightweight
lint
linted
linting
lints
live
liver
lives
livest
liveware
livewares
lobotomies
lobotomy
logical
lose
loser
losers
loses
losing
loss
losses
lost
lurker
lurkers
machinable
macro
macrologies
macrology
macros
magic
magics
mailbomb
mailbombed
mailbombing
mailbombs
mainframe
mainframes
management
managements
manged
mangeds
mangle
mangled
mangler
manglers
mangles
mangling
marbles
marginal
marginally
martian
martians
massage
massaged
massages
massaging
meg
megs
meme
memes
meta
mickey
mickeys
microfloppies
microfloppieses
minifloppies
minifloppieses
misfeature
misfeatures
mockingbird
mockingbirds
mod
modded
modding
mode
modes
mods
modulo
monstrosities
monstrosity
mu
multitask
multitasks
mumble
munch
munched
munches
munching
munchkin
munchkins
mundane
mundanes
mung
munged
munging
mungs
music
musics
mutter
muttered
muttering
mutters
naive
naiver
naivest
nanobot
nanobots
nanotechnologies
nanotechnology
nature
natures
neophilia
neophilias
nerd
nerds
netiquette
netiquettes
netter
netters
newbie
newbies
newsgroup
newsgroups
nick
nickle
nickles
nicks
noddy
node
nodes
nonlinear
nontrivial
notwork
notworks
nude
nuder
nudest
nuke
nuked
nukes
nuking
numbers
numberses
nybble
nybbled
nybbles
nybbling
nyetwork
nyetworks
obscure
obscurer
obscurest
offline
op
open
opens
ops
optimism
optimisms
orphan
orphans
orthogonal
overrun
overruns
parse
parsed
parses
parsing
pastie
pasties
patch
patched
patches
patching
path
pathological
paths
payware
paywares
peek
peeks
peon
peons
pessimal
pessimaled
pessimaling
pessimals
phage
phages
phase
phases
phreaking
phreakings
ping
pinged
pinging
pings
pipe
pipes
pistol
pistols
playpen
playpens
plonk
plonked
plonking
plonks
plumbing
plumbings
pod
pods
poke
pokes
poll
polled
polling
polls
pop
popped
popping
pops
poser
posers
post
posted
posting
postings
postmaster
postmasters
posts
priesthood
priesthoods
print
printed
printing
prints
profile
profiles
program
programming
programmings
programs
proprietary
protocol
protocols
prowler
prowlers
pseudo
pseudos
puff
puffed
puffing
puffs
punt
punted
punting
punts
push
pushed
pushes
pushing
quad
quads
quantifiers
quarter
quarters
ques
queses
quine
quines
quotient
quotients
random
randomness
randomnesses
randoms
rape
raped
rapes
raping
rave
raved
raves
raving
real
realer
realest
reaper
reapers
recursion
recursions
replicator
replicators
replies
reply
restriction
restrictions
rip
ripoff
ripoffs
ripped
ripping
rips
roach
roached
roaches
roaching
robot
robots
robust
robuster
robustest
rococo
rogue
rogues
root
roots
rude
ruder
rudest
runes
runic
sacred
saga
sagas
said
salt
salts
samizdat
samizdats
samurai
samurais
sandbox
sandboxes
say
saying
says
scag
scagged
scagging
scags
scratch
scratched
scratches
scratching
screen
screens
screw
screws
scribble
scribbles
scrog
scrogged
scrogging
scrogs
segment
segmented
segmenting
segments
selvage
selvages
semi
semis
server
servers
shareware
sharewares
shebang
shebangs
shell
shells
shim
shims
showstopper
showstoppers
shriek
shrieks
sidecar
sidecars
silicon
silicons
silo
silos
skulker
skulkers
slab
slabbed
slabbing
slabs
slack
slacks
slash
slashes
sleep
sleeping
sleeps
slept
slim
slims
slop
slops
slurp
slurped
slurping
slurps
smart
smarter
smartest
smiley
smileys
smoke
smoked
smokes
smoking
smurf
smurfs
snail
snailed
snailing
snails
snap
snapped
snapping
snaps
snarf
snarfed
snarfing
snarfs
snark
snarks
sneaker
sneakers
sniff
sniffed
sniffing
sniffs
softies
softy
spam
spammed
spamming
spams
spangle
spangles
spawn
spawns
speedometer
speedometers
spell
spells
spiffier
spiffiest
spiffy
spike
spiked
spikes
spiking
spin
spinning
spins
splat
splats
spoiler
spoilers
sponge
sponges
spoof
spoofed
spoofing
spoofs
spool
spooled
spooling
spools
spun
stack
stacks
state
states
stoppage
stoppages
store
stores
stroke
strokes
strudel
strudels
studlier
studliest
studly
stunning
suit
suits
sunspots
sunspotses
support
supports
surf
surfed
surfing
surfs
swab
swabbed
swabbing
swabs
swap
swapped
swapping
swaps
swizzle
swizzled
swizzles
swizzling
sync
syncs
sysop
sysops
system
systems
tanked
taste
tastes
tee
tees
tense
tenser
tensest
tentacle
tentacles
test
tests
text
texts
theologies
theology
theories
theory
thrash
thrashed
thrashes
thrashing
thread
threads
thud
thuds
thumb
thumbs
thunk
thunks
tick
ticks
toad
toadded
toadding
toads
toast
toasted
toaster
toasters
toasting
toasts
toggle
toggled
toggles
toggling
tool
tooled
tooling
tools
tourist
touristic
tourists
toy
toys
trampoline
trampolines
trap
trapped
trapping
traps
trash
trashed
trashes
trashing
trawl
trawled
trawling
trawls
trivial
troglodyte
troglodytes
troll
trolled
trolling
trolls
tron
tronned
tronning
trons
tube
tubes
tune
tuned
tunes
tuning
tweak
tweaked
tweaking
tweaks
tweeter
tweeters
twiddle
twiddled
twiddles
twiddling
twink
twinks
uninteresting
up
upload
uploaded
uploading
uploads
upped
upping
ups
urchin
urchins
user
users
vanilla
vaporware
vaporwares
var
vars
verbiage
verbiages
videotex
videotexes
virgin
virtual
virus
viruses
visionaries
visionary
voice
voiced
voices
voicing
wabbit
wabbits
waldo
waldoes
waldos
walk
walks
wall
walled
walling
wallpaper
wallpapers
walls
wank
wanked
wanking
wanks
wannabee
wannabees
warez
warezes
wart
warts
weasel
weasels
wedged
wedgie
wedgies
weeds
weedses
weenie
weenies
wetware
wetwares
whack
whacked
whacker
whackers
whacking
whacks
whales
whaleses
wheel
wheels
widget
widgets
wiggles
wiggleses
win
winner
winners
winning
wins
wired
wireds
wizard
wizardly
wizards
womble
wombles
won
wonkier
wonkiest
wonky
woofer
woofers
workaround
workarounds
worm
wormhole
wormholes
worms
zap
zapped
zapping
zaps
zen
zenned
zenning
zens
zero
zeroed
zeroes
zeroing
zeros
zeroth
zigamorph
zigamorphs
zip
zipped
zipping
zips
zombie
zombies
zorch
zorched
zorches
zorching
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
// Security > Cryptography > PRNG Sequence Guessing
// Given last ten output values of random.nextInt(), guess the next value to be output by the generator.
//
// https://www.hackerrank.com/challenges/prng-sequence-guessing/problem
// challenge id: 786
//
#include <iostream>
#include <cstdint>
using namespace std;
uint64_t next_int(uint64_t& seed)
{
seed = (seed * 0x5DEECE66Dull + 0xB) & ((1ull << 48) - 1);
return (seed >> 17) % 1000ull;
}
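// next_int() reproduces the LCG step of java.util.Random: a 48-bit state, with
// nextInt(1000) taken (essentially) from the top 31 bits reduced modulo 1000.
//
// find_seed() recovers the hidden state in two phases:
//   1. brute-force the 17 low bits that never reach the output: since
//      1000 % 8 == 0, each output modulo 8 exposes seed bits 17..19, and the
//      low 20 bits of the LCG evolve independently of the higher ones, so the
//      ten known outputs filter the candidate low-20-bit states;
//   2. extend a surviving candidate to a full 48-bit seed (stepping by
//      1000 << 17 keeps both the low bits and the first output unchanged) and
//      verify the complete sequence.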
uint64_t find_seed(const uint64_t a[10])
{
uint64_t seed0, seed1, seed;
int i;
for (seed0 = 0; seed0 < 1ull << 17; ++seed0)
{
seed = seed0 + ((a[0] % 8) << 17);
for (i = 1; i < 10; ++i)
{
if (next_int(seed) % 8 != a[i] % 8)
break;
}
if (i == 10)
{
for (seed1 = (a[0] << 17) + seed0; seed1 < (1ull << 48); seed1 += (1000ull << 17))
{
seed = seed1;
for (i = 1; i < 10; ++i)
{
if (next_int(seed) != a[i])
break;
}
if (i == 10)
return seed1;
}
}
}
return 0;
}
int main()
{
int t, i;
uint64_t a[10], seed, x;
cin >> t;
while (t--)
{
for (i = 0; i < 10; ++i)
cin >> a[i];
seed = find_seed(a);
        // note: the first number of the series is (seed >> 17) % 1000
for (i = 1; i < 20; ++i)
{
x = next_int(seed);
if (i >= 10) cout << x << " ";
}
cout << endl;
}
return 0;
}
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
### [Security](https://www.hackerrank.com/domains/security)
Practice and study of secure communications
#### [Cryptography](https://www.hackerrank.com/domains/security/cryptography)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[PRNG Sequence Guessing](https://www.hackerrank.com/challenges/prng-sequence-guessing)|Given last ten output values of random.nextInt(), guess the next value to be output by the generator.|[C++](prng-sequence-guessing.cpp)|Medium
[Keyword Transposition Cipher](https://www.hackerrank.com/challenges/keyword-transposition-cipher)|Given a piece of cipher text and the keyword used to encipher it, write an algorithm to output the original message .|[Python](keyword-transposition-cipher.py)|Easy
[Basic Cryptanalysis](https://www.hackerrank.com/challenges/basic-cryptanalysis)|Given a piece of text encoded with a simple monoalphabetic substitution cipher, use basic cryptanalytic techniques to attempt to recover the original plain text.|[Python](basic-cryptanalysis.py)|Hard
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
// Security > Terminology and Concepts > Security - Message Space and Ciphertext Space
// Given a message, you need to find what message you obtain if you shift each digit in the message string ( 1 to the right and cyclic).
//
// https://www.hackerrank.com/challenges/security-message-space-and-ciphertext-space/problem
//
#include <cmath>
#include <string>
#include <vector>
#include <iostream>
#include <algorithm>
using namespace std;
int main() {
/* Enter your code here. Read input from STDIN. Print output to STDOUT */
string s;
cin >> s;
for (auto&& i : s)
{
cout << (i - '0' + 1) % 10;
}
return 0;
}
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
// Security > Terminology and Concepts > Security Key Spaces
// Consider a message that consists of decimal digits and a key, e, which operates by shifting each digit by e places. Find the corresponding cipher text.
//
// https://www.hackerrank.com/challenges/security-key-spaces/problem
//
#include <cmath>
#include <string>
#include <vector>
#include <iostream>
#include <algorithm>
using namespace std;
int main() {
/* Enter your code here. Read input from STDIN. Print output to STDOUT */
string s;
int k;
cin >> s;
cin >> k;
for (auto&& i : s)
{
cout << (i - '0' + k) % 10;
}
return 0;
}
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
// Security > Terminology and Concepts > Security Encryption Scheme
// Count the number of bijections and the number of keys that produce different encryption functions.
//
// https://www.hackerrank.com/challenges/security-encryption-scheme/problem
//
#include <cmath>
#include <cstdio>
#include <vector>
#include <iostream>
#include <algorithm>
using namespace std;
// the answer is n!, the number of bijections of an n-element set
int main() {
int n;
int f = 1;
cin >> n;
for (int i = 1; i <= n; ++i) f *= i;
cout << f;
return 0;
}
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
add_hackerrank(security-message-space-and-ciphertext-space security-message-space-and-ciphertext-space.cpp)
add_hackerrank(security-key-spaces security-key-spaces.cpp)
add_hackerrank(security-encryption-scheme security-encryption-scheme.cpp)
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
### [Security](https://www.hackerrank.com/domains/security)
Practice and study of secure communications
#### [Terminology and Concepts](https://www.hackerrank.com/domains/security/concepts)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Security - Message Space and Ciphertext Space](https://www.hackerrank.com/challenges/security-message-space-and-ciphertext-space)|Given a message, you need to find what message you obtain if you shift each digit in the message string ( 1 to the right and cyclic).|[C++](security-message-space-and-ciphertext-space.cpp)|Easy
[Security Key Spaces](https://www.hackerrank.com/challenges/security-key-spaces)|Consider a message that consists of decimal digits and a key, e, which operates by shifting each digit by e places. Find the corresponding cipher text.|[C++](security-key-spaces.cpp)|Easy
[Security Encryption Scheme](https://www.hackerrank.com/challenges/security-encryption-scheme)|Count the number of bijections and the number of keys that produce different encryption functions.|[C++](security-encryption-scheme.cpp)|Easy
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
# init file for bash
# mimics bash init behaviour
source /etc/profile
[ -f ~/.bash_profile ] && source ~/.bash_profile
[ -f ~/.bashrc -a ! -f ~/.bash_profile ] && source ~/.bashrc
workspaceFolder=${workspaceFolder:-$(pwd)}
PATH="${workspaceFolder}":$PATH
CDPATH=:"${workspaceFolder}"
alias cdd='cd "${workspaceFolder}"'
alias cdb='cd "${workspaceFolder}/build"'
build()
{
local nproc
local option
local opt_verbose=${VERBOSE}
local opt_type=
local opt_dryrun=
local opt_target=
local OPTIND
local OPTARG
while getopts "hvdDrRcn" option; do
#echo option=$option OPTARG=$OPTARG OPTIND=$OPTIND OPTERR=$OPTERR OPTSTRING=$OPTSTRING
case "${option}" in
h|\?)
echo "Build challenges"
echo "Usage: build [options]"
echo "Options:"
echo " -d -D switch build type to Debug"
echo " -r -R switch build type to Release"
echo " -c rebuild cache"
echo " -v set verbose mode"
echo " -n dry run (do nothing)"
return
;;
v) opt_verbose=1 ;;
d|D) opt_type=Debug ;;
r|R) opt_type=Release ;;
c) opt_target=rebuild_cache ;;
n) opt_dryrun=1 ;;
esac
done
shift $((OPTIND-1))
local build_dir=${1:-build}
# use the previous build type if none given on the command line
if [ "${opt_type}" = "" ]; then
previous_type=$(cd "${workspaceFolder}" && [ -d ${build_dir} ] && sed -n -e 's/CMAKE_BUILD_TYPE:STRING=\(.*\)/\1/p' ${build_dir}/CMakeCache.txt)
opt_type=${previous_type:-Debug}
fi
if [ $(uname) = Darwin ] ; then
nproc=$(sysctl -n hw.logicalcpu)
else
nproc=$(nproc)
fi
# display a resume of options
echo -en "\033[1;36m======================== \033[37;42mHackerRank\033[0m "
echo -en "\033[1;36m~~\033[0m "
echo -en "${build_dir} "
echo -en "\033[1;36m~~\033[0m "
echo -en "\033[0;1;33m${opt_type} "
echo -en "\033[1;36m~~\033[0m "
[ ${opt_verbose} ] && echo -en "\033[1;35mVERBOSE\033[0m "
echo -en "(${nproc} cpu) "
echo -en "\033[1;36m=========================\033[0m"
echo
[ $opt_dryrun ] && return
(
set -e
cd "${workspaceFolder}"
mkdir -p ${build_dir}
cd ${build_dir}
cmake -DHACKERRANK_FP:BOOL=OFF -DCMAKE_BUILD_TYPE=${opt_type} "${workspaceFolder}"
VERBOSE=${opt_verbose} make -j${nproc} ${opt_target}
)
}
COLOR_WHITE_ON_GREEN="\033[37;42m"
LIGHT_CYAN="\033[1;36m"
COLOR_END="\033[0m"
echo -e "${COLOR_WHITE_ON_GREEN}HackerRank${COLOR_END} on ${LIGHT_CYAN}$(uname)${COLOR_END} ready."
unset COLOR_WHITE_ON_GREEN LIGHT_CYAN COLOR_END
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
{
"files.associations": {
"bashrc": "shellscript",
"cstdlib": "cpp",
"ios": "cpp",
"queue": "cpp",
"stack": "cpp",
"__bit_reference": "cpp",
"__hash_table": "cpp",
"__split_buffer": "cpp",
"__tree": "cpp",
"deque": "cpp",
"initializer_list": "cpp",
"iterator": "cpp",
"list": "cpp",
"map": "cpp",
"set": "cpp",
"string": "cpp",
"string_view": "cpp",
"unordered_map": "cpp",
"vector": "cpp",
"ostream": "cpp",
"istream": "cpp",
"__string": "cpp",
"algorithm": "cpp",
"exception": "cpp",
"iosfwd": "cpp",
"__locale": "cpp",
"__config": "cpp",
"__nullptr": "cpp",
"new": "cpp",
"stdexcept": "cpp",
"typeinfo": "cpp",
"stdc++.h.in": "cpp",
"cstddef": "cpp",
"type_traits": "cpp",
"bitset": "cpp",
"utility": "cpp",
"__functional_base": "cpp",
"__functional_base_03": "cpp",
"__tuple": "cpp",
"chrono": "cpp",
"functional": "cpp",
"limits": "cpp",
"memory": "cpp",
"ratio": "cpp",
"tuple": "cpp",
"string.h": "c",
"locale": "cpp",
"numeric": "cpp",
"cmath": "cpp",
"iostream": "cpp"
},
"files.exclude": {
"**/.git": true,
"**/.svn": true,
"**/.hg": true,
"**/CVS": true,
"**/.DS_Store": true,
"**/build/": true
},
"terminal.integrated.fontSize": 12,
"terminal.integrated.env.osx": {
"workspaceFolder": "${workspaceFolder}"
},
"terminal.integrated.env.linux": {
"workspaceFolder": "${workspaceFolder}"
},
"terminal.integrated.env.windows": {
"workspaceFolder": "${workspaceFolder}"
},
"terminal.integrated.shell.osx": "/bin/bash",
"terminal.integrated.shellArgs.osx": [
"--rcfile",
".vscode/bashrc",
],
"terminal.integrated.shell.windows":"C:/Program Files/Git/usr/bin/bash.exe",
"terminal.integrated.shellArgs.windows": [
"--rcfile",
".vscode/bashrc"
],
"terminal.integrated.shellArgs.linux": [
"--rcfile",
".vscode/bashrc"
],
"python.linting.enabled": false,
"editor.trimAutoWhitespace": true,
"files.trimTrailingWhitespace": true,
"files.trimFinalNewlines": true
}
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "build debug",
"type": "shell",
"command": "mkdir -p debug && cd debug && cmake -DCMAKE_BUILD_TYPE=Debug .. && make",
"group": {
"kind": "build",
"isDefault": true
},
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated"
},
"problemMatcher": ["$gcc"]
},
{
"label": "build release",
"type": "shell",
"command": "mkdir -p build && cd build && cmake -DCMAKE_BUILD_TYPE=Release .. && make",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "shared"
},
"problemMatcher": ["$gcc"]
},
{
"label": "autopep8 current file",
"type": "process",
"command": "${config:python.pythonPath}",
"args": [
"-m",
"autopep8",
"-i",
"${file}"
]
},
{
"label": "build test",
"type": "shell",
"command": "mkdir -p debug && cd debug && cmake -DCMAKE_BUILD_TYPE=Debug .. && make && ctest",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated"
},
"problemMatcher": ["$gcc"]
},
]
} | {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
{
    // Use IntelliSense to learn more about the possible attributes.
    // Hover to see descriptions of the existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "(lldb) structuring-the-document",
"type": "cppdbg",
"request": "launch",
"program": "${workspaceFolder}/build/c/c-structs-and-enums/structuring-the-document",
"args": [],
"stopAtEntry": false,
"cwd": "${workspaceFolder}/build/c/c-structs-and-enums",
"environment": [],
"externalConsole": true,
"MIMode": "lldb"
},
{
"name": "(lldb) querying-the-document",
"type": "cppdbg",
"request": "launch",
"program": "${workspaceFolder}/build/c/c-functions/querying-the-document",
"args": [],
"stopAtEntry": false,
"cwd": "${workspaceFolder}/build/c/c-functions",
"environment": [],
"externalConsole": true,
"MIMode": "lldb"
},
{
"name": "Python: Current File",
"type": "python",
"request": "launch",
"program": "${file}"
},
{
"name": "(lldb) algo/hamming-distance",
"type": "cppdbg",
"request": "launch",
"program": "${workspaceFolder}/debug/algo/hamming-distance",
"args": [ "tests/hamming-distance/input/input25.txt" ],
"stopAtEntry": false,
"cwd": "${workspaceFolder}/build/algo",
"environment": [],
"externalConsole": true,
"MIMode": "lldb"
},
{
"name": "(lldb) cpp/deque-stl",
"type": "cppdbg",
"request": "launch",
"program": "${workspaceFolder}/debug/cpp/deque-stl",
"args": [],
"stopAtEntry": false,
"cwd": "${workspaceFolder}",
"environment": [],
"externalConsole": true,
"MIMode": "lldb"
}
]
} | {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
add_subdirectory(warmup)
add_subdirectory(implementation)
add_subdirectory(strings)
add_subdirectory(greedy)
add_subdirectory(arrays-and-sorting)
add_subdirectory(search)
add_subdirectory(dynamic-programming)
add_subdirectory(bit-manipulation)
add_subdirectory(game-theory)
add_subdirectory(algo-debugging)
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
### [Algorithms](https://www.hackerrank.com/domains/algorithms)
The true test of problem solving: when one realizes that time and memory aren't infinite.
#### [Warmup](https://www.hackerrank.com/domains/algorithms/warmup)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Solve Me First](https://www.hackerrank.com/challenges/solve-me-first)|This is an easy challenge to help you start coding in your favorite languages!|[C++](warmup/solve-me-first.cpp) [Haskell](warmup/solve-me-first.hs) [Java](warmup/solve-me-first.java) [Javascript](warmup/solve-me-first.js) [Python](warmup/solve-me-first.py) [bash](warmup/solve-me-first.sh)|Easy
[Simple Array Sum](https://www.hackerrank.com/challenges/simple-array-sum)|Calculate the sum of integers in an array.|[C++](warmup/simple-array-sum.cpp) [Python](warmup/simple-array-sum.py)|Easy
[Compare the Triplets](https://www.hackerrank.com/challenges/compare-the-triplets)|Compare the elements in two triplets.|[C++](warmup/compare-the-triplets.cpp) [Python](warmup/compare-the-triplets.py)|Easy
[A Very Big Sum](https://www.hackerrank.com/challenges/a-very-big-sum)|Calculate the sum of the values in an array that might exceed the range of int values.|[Python](warmup/a-very-big-sum.py)|Easy
[Diagonal Difference](https://www.hackerrank.com/challenges/diagonal-difference)|Calculate the absolute difference of sums across the two diagonals of a square matrix.|[Python](warmup/diagonal-difference.py)|Easy
[Plus Minus](https://www.hackerrank.com/challenges/plus-minus)|Calculate the fraction of positive, negative and zero values in an array.|[Python](warmup/plus-minus.py)|Easy
[Staircase](https://www.hackerrank.com/challenges/staircase)|Print a right-aligned staircase with n steps.|[Python](warmup/staircase.py)|Easy
[Mini-Max Sum](https://www.hackerrank.com/challenges/mini-max-sum)|Find the maximum and minimum values obtained by summing four of five integers.|[Python](warmup/mini-max-sum.py)|Easy
[Birthday Cake Candles](https://www.hackerrank.com/challenges/birthday-cake-candles)|Determine the number of candles that are blown out.|[Python](warmup/birthday-cake-candles.py)|Easy
[Time Conversion](https://www.hackerrank.com/challenges/time-conversion)|Convert time from an AM/PM format to a 24 hour format.|[Python](warmup/time-conversion.py)|Easy
#### [Implementation](https://www.hackerrank.com/domains/algorithms/implementation)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Grading Students](https://www.hackerrank.com/challenges/grading)|Round student grades according to Sam's rules.|[Python](implementation/grading.py)|Easy
[Apple and Orange](https://www.hackerrank.com/challenges/apple-and-orange)|Find the respective numbers of apples and oranges that fall on Sam's house.|[Python](implementation/apple-and-orange.py)|Easy
[Kangaroo](https://www.hackerrank.com/challenges/kangaroo)|Can two kangaroo meet after making the same number of jumps?|[Python](implementation/kangaroo.py)|Easy
[Between Two Sets](https://www.hackerrank.com/challenges/between-two-sets)|Find the number of integers that satisfies certain criteria relative to two sets.|[Python](implementation/between-two-sets.py)|Easy
[Breaking the Records](https://www.hackerrank.com/challenges/breaking-best-and-worst-records)|Given an array of Maria's basketball scores all season, determine the number of times she breaks her best and worst records.|[Python](implementation/breaking-best-and-worst-records.py)|Easy
[Birthday Chocolate](https://www.hackerrank.com/challenges/the-birthday-bar)|Given an array of integers, find the number of subarrays of length k having sum s.|[Python](implementation/the-birthday-bar.py)|Easy
[Divisible Sum Pairs](https://www.hackerrank.com/challenges/divisible-sum-pairs)|Count the number of pairs in an array having sums that are evenly divisible by a given number.|[Python](implementation/divisible-sum-pairs.py)|Easy
[Migratory Birds](https://www.hackerrank.com/challenges/migratory-birds)|Determine which type of bird in a flock occurs at the highest frequency.|[Python](implementation/migratory-birds.py)|Easy
[Day of the Programmer](https://www.hackerrank.com/challenges/day-of-the-programmer)|Given year, determine date of the 256th day of the year.|[Python](implementation/day-of-the-programmer.py)|Easy
[Bon Appétit](https://www.hackerrank.com/challenges/bon-appetit)|Determine whether or not Brian overcharged Anna for their split bill.|[Python](implementation/bon-appetit.py)|Easy
[Sock Merchant](https://www.hackerrank.com/challenges/sock-merchant)|How many pairs of socks John can sell?|[Python](implementation/sock-merchant.py)|Easy
[Drawing Book ](https://www.hackerrank.com/challenges/drawing-book)|How many pages does Brie need to turn to get to page p?|[Python](implementation/drawing-book.py)|Easy
[Counting Valleys](https://www.hackerrank.com/challenges/counting-valleys)|Count the valleys encountered during vacation.|[C++](implementation/counting-valleys.cpp)|Easy
[Electronics Shop](https://www.hackerrank.com/challenges/electronics-shop)|Determine the most expensive Keyboard and USB drive combination Monica can purchase within her budget.|[C++](implementation/electronics-shop.cpp)|Easy
[Cats and a Mouse](https://www.hackerrank.com/challenges/cats-and-a-mouse)|Which cat will catch the mouse first?|[Python](implementation/cats-and-a-mouse.py)|Easy
[Forming a Magic Square](https://www.hackerrank.com/challenges/magic-square-forming)|Find the minimum cost of converting a 3 by 3 matrix into a magic square.|[Python](implementation/magic-square-forming.py)|Medium
[Picking Numbers](https://www.hackerrank.com/challenges/picking-numbers)|What's the largest size subset can you choose from an array such that the difference between any two integers is not bigger than 1?|[Python](implementation/picking-numbers.py)|Easy
[Climbing the Leaderboard](https://www.hackerrank.com/challenges/climbing-the-leaderboard)|Help Alice track her progress toward the top of the leaderboard!|[Python](implementation/climbing-the-leaderboard.py)|Medium
[The Hurdle Race](https://www.hackerrank.com/challenges/the-hurdle-race)|Can you help Dan determine the minimum number of magic beverages he must drink to jump all the hurdles?|[Python](implementation/the-hurdle-race.py)|Easy
[Designer PDF Viewer](https://www.hackerrank.com/challenges/designer-pdf-viewer)|Help finding selection area in PDF Viewer.|[Python](implementation/designer-pdf-viewer.py)|Easy
[Utopian Tree](https://www.hackerrank.com/challenges/utopian-tree)|Predict the height of the tree after N growth cycles.|[Python](implementation/utopian-tree.py)|Easy
[Angry Professor](https://www.hackerrank.com/challenges/angry-professor)|Decide whether or not the class will be canceled based on the arrival times of its students.|[Python](implementation/angry-professor.py)|Easy
[Beautiful Days at the Movies](https://www.hackerrank.com/challenges/beautiful-days-at-the-movies)|Find the number of beautiful days.|[Python](implementation/beautiful-days-at-the-movies.py)|Easy
[Viral Advertising](https://www.hackerrank.com/challenges/strange-advertising)|How many people will know about the new product after n days?|[Python](implementation/strange-advertising.py)|Easy
[Save the Prisoner!](https://www.hackerrank.com/challenges/save-the-prisoner)|Given M sweets and a circular queue of N prisoners, find the ID of the last prisoner to receive a sweet.|[Python](implementation/save-the-prisoner.py)|Easy
[Circular Array Rotation](https://www.hackerrank.com/challenges/circular-array-rotation)|Print the elements in an array after 'm' right circular rotation operations.|[Python](implementation/circular-array-rotation.py)|Easy
[Sequence Equation](https://www.hackerrank.com/challenges/permutation-equation)|Find some y satisfying p(p(y)) = x for each x from 1 to n.|[Python](implementation/permutation-equation.py)|Easy
[Jumping on the Clouds: Revisited](https://www.hackerrank.com/challenges/jumping-on-the-clouds-revisited)|Determine the amount of energy Aerith has after the cloud game ends.|[Python](implementation/jumping-on-the-clouds-revisited.py)|Easy
[Find Digits](https://www.hackerrank.com/challenges/find-digits)|Calculate the number of digits in an integer that evenly divide it.|[Python](implementation/find-digits.py)|Easy
[Extra Long Factorials](https://www.hackerrank.com/challenges/extra-long-factorials)|Calculate a very large factorial that doesn't fit in the conventional numeric data types.|[Haskell](implementation/extra-long-factorials.hs) [Python](implementation/extra-long-factorials.py)|Medium
[Append and Delete](https://www.hackerrank.com/challenges/append-and-delete)|Can you convert $s$ to $t$ by performing exactly $k$ operations?|[Python](implementation/append-and-delete.py)|Easy
[Sherlock and Squares](https://www.hackerrank.com/challenges/sherlock-and-squares)|Find the count of square numbers between A and B|[Python](implementation/sherlock-and-squares.py)|Easy
[Library Fine](https://www.hackerrank.com/challenges/library-fine)|Help your library calculate fees for late books!|[Python](implementation/library-fine.py)|Easy
[Cut the sticks](https://www.hackerrank.com/challenges/cut-the-sticks)|Given the lengths of n sticks, print the number of sticks that are left before each cut operation.|[Python](implementation/cut-the-sticks.py)|Easy
[Non-Divisible Subset](https://www.hackerrank.com/challenges/non-divisible-subset)|Find the size of the maximal non-divisible subset.|[Python](implementation/non-divisible-subset.py)|Medium
[Repeated String](https://www.hackerrank.com/challenges/repeated-string)|Find and print the number of letter a's in the first n letters of an infinitely large periodic string.|[Python](implementation/repeated-string.py)|Easy
[Jumping on the Clouds](https://www.hackerrank.com/challenges/jumping-on-the-clouds)|Jumping on the clouds|[Python](implementation/jumping-on-the-clouds.py)|Easy
[Equalize the Array](https://www.hackerrank.com/challenges/equality-in-a-array)|Delete a minimal number of elements from an array so that all elements of the modified array are equal to one another.|[Python](implementation/equality-in-a-array.py)|Easy
[Queen's Attack II](https://www.hackerrank.com/challenges/queens-attack-2)|Find the number of squares the queen can attack.|[Python](implementation/queens-attack-2.py)|Medium
[ACM ICPC Team](https://www.hackerrank.com/challenges/acm-icpc-team)|Print the maximum topics a given team can cover for ACM ICPC World Finals|[Python](implementation/acm-icpc-team.py)|Easy
[Taum and B'day](https://www.hackerrank.com/challenges/taum-and-bday)|Calculate the minimum cost required to buy some amounts of two types of gifts when costs of each type and the rate of conversion from one form to another is provided.|[Python](implementation/taum-and-bday.py)|Easy
[Organizing Containers of Balls](https://www.hackerrank.com/challenges/organizing-containers-of-balls)|Determine if David can perform some sequence of swap operations such that each container holds one distinct type of ball.|[Python](implementation/organizing-containers-of-balls.py)|Medium
[Encryption](https://www.hackerrank.com/challenges/encryption)|Encrypt a string by arranging the characters of a string into a matrix and printing the resulting matrix column wise.|[Python](implementation/encryption.py)|Medium
[Bigger is Greater](https://www.hackerrank.com/challenges/bigger-is-greater)|Rearrange the letters of a string to construct another string such that the new string is lexicographically greater than the original.|[Python](implementation/bigger-is-greater.py)|Medium
[Modified Kaprekar Numbers](https://www.hackerrank.com/challenges/kaprekar-numbers)|Print kaprekar numbers in the given range|[Python](implementation/kaprekar-numbers.py)|Easy
[Beautiful Triplets](https://www.hackerrank.com/challenges/beautiful-triplets)|Count the beautiful triplets in a sequence.|[Python](implementation/beautiful-triplets.py)|Easy
[Minimum Distances](https://www.hackerrank.com/challenges/minimum-distances)|Find the minimum distance between two different indices containing the same integers.|[Python](implementation/minimum-distances.py)|Easy
[Halloween Sale](https://www.hackerrank.com/challenges/halloween-sale)|How many games can you buy during the Halloween Sale?|[Python](implementation/halloween-sale.py)|Easy
[The Time in Words](https://www.hackerrank.com/challenges/the-time-in-words)|Display the time in words.|[Python](implementation/the-time-in-words.py)|Medium
[Chocolate Feast ](https://www.hackerrank.com/challenges/chocolate-feast)|Calculate the number of chocolates that can be bought following the given conditions.|[Python](implementation/chocolate-feast.py)|Easy
[Service Lane](https://www.hackerrank.com/challenges/service-lane)|Calculate the maximum width of the vehicle that can pass through a service lane.|[Python](implementation/service-lane.py)|Easy
[Lisa's Workbook](https://www.hackerrank.com/challenges/lisa-workbook)|A workbook has chapters with several problems per page; how many problems have a number equal to the number of the page they appear on?|[Python](implementation/lisa-workbook.py)|Easy
[Flatland Space Stations](https://www.hackerrank.com/challenges/flatland-space-stations)|Find the maximum distance an astronaut needs to travel to reach the nearest space station.|[Python](implementation/flatland-space-stations.py)|Easy
[Fair Rations](https://www.hackerrank.com/challenges/fair-rations)|How many loaves of bread will it take to feed your subjects?|[Python](implementation/fair-rations.py)|Easy
[Cavity Map](https://www.hackerrank.com/challenges/cavity-map)|Depict cavities on a square map|[C++](implementation/cavity-map.cpp)|Easy
[Manasa and Stones](https://www.hackerrank.com/challenges/manasa-and-stones)|Calculate the possible values of the last stone where consecutive values on the stones differ by a value 'a' or a value 'b'.|[Python](implementation/manasa-and-stones.py)|Easy
[The Grid Search](https://www.hackerrank.com/challenges/the-grid-search)|Given a 2D array of digits, try to find a given 2D grid pattern of digits within it.|[Python](implementation/the-grid-search.py)|Medium
[Happy Ladybugs](https://www.hackerrank.com/challenges/happy-ladybugs)|Determine whether or not all the ladybugs can be made happy.|[Python](implementation/happy-ladybugs.py)|Easy
[Strange Counter](https://www.hackerrank.com/challenges/strange-code)|Print the value displayed by the counter at a given time, $t$.|[Python](implementation/strange-code.py)|Easy
[Absolute Permutation](https://www.hackerrank.com/challenges/absolute-permutation)|Find lexicographically smallest absolute permutation.|[Python](implementation/absolute-permutation.py)|Medium
[Ema's Supercomputer](https://www.hackerrank.com/challenges/two-pluses)|Determine the product of the areas of two pluses on a grid.|[C++](implementation/two-pluses.cpp)|Medium
[Larry's Array](https://www.hackerrank.com/challenges/larrys-array)|Larry|[Python](implementation/larrys-array.py)|Medium
[Almost Sorted](https://www.hackerrank.com/challenges/almost-sorted)|Sort an array by either swapping or reversing a segment|[Python](implementation/almost-sorted.py)|Medium
[Matrix Layer Rotation ](https://www.hackerrank.com/challenges/matrix-rotation-algo)|Rotate the matrix R times and print the resultant matrix.|[Python](implementation/matrix-rotation-algo.py)|Hard
#### [Strings](https://www.hackerrank.com/domains/algorithms/strings)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Super Reduced String](https://www.hackerrank.com/challenges/reduced-string)|Given a string, repeatedly remove adjacent pairs of matching characters and then print the reduced result.|[Python](strings/reduced-string.py)|Easy
[CamelCase](https://www.hackerrank.com/challenges/camelcase)|Count the words in a camelCase string.|[Python](strings/camelcase.py)|Easy
[Strong Password](https://www.hackerrank.com/challenges/strong-password)|How many characters should you add to make the password strong?|[Python](strings/strong-password.py)|Easy
[Two Characters](https://www.hackerrank.com/challenges/two-characters)|Print the length of the longest possible string $t$ you can form.|[Python](strings/two-characters.py)|Easy
[Caesar Cipher](https://www.hackerrank.com/challenges/caesar-cipher-1)|Encrypt a string by rotating the alphabets by a fixed value in the string.|[Python](strings/caesar-cipher-1.py)|Easy
[Mars Exploration](https://www.hackerrank.com/challenges/mars-exploration)|Save Our Ship!|[Python](strings/mars-exploration.py)|Easy
[HackerRank in a String!](https://www.hackerrank.com/challenges/hackerrank-in-a-string)|Determine if a string contains a subsequence of characters that spell "hackerrank".|[Python](strings/hackerrank-in-a-string.py)|Easy
[Pangrams](https://www.hackerrank.com/challenges/pangrams)|Check whether a given string is a pangram or not.|[Python](strings/pangrams.py)|Easy
[Weighted Uniform Strings](https://www.hackerrank.com/challenges/weighted-uniform-string)|Determine if a string contains uniform substrings of certain weights.|[Python](strings/weighted-uniform-string.py)|Easy
[Separate the Numbers](https://www.hackerrank.com/challenges/separate-the-numbers)|Determine if a numeric string can be broken into a sequence of increasing numbers.|[Python](strings/separate-the-numbers.py)|Easy
[Funny String](https://www.hackerrank.com/challenges/funny-string)|Is the absolute difference between consecutive characters the same for a string and the reverse of that string at every index?|[Python](strings/funny-string.py)|Easy
[Gemstones](https://www.hackerrank.com/challenges/gem-stones)|Find the number of different gem-elements present.|[Python](strings/gem-stones.py)|Easy
[Alternating Characters ](https://www.hackerrank.com/challenges/alternating-characters)|Calculate the minimum number of deletions required to convert a string into a string in which consecutive characters are different.|[Python](strings/alternating-characters.py)|Easy
[Beautiful Binary String](https://www.hackerrank.com/challenges/beautiful-binary-string)|How many binary characters must you change to remove every occurrence of "010" from a binary string?|[Python](strings/beautiful-binary-string.py)|Easy
[The Love-Letter Mystery](https://www.hackerrank.com/challenges/the-love-letter-mystery)|Find the minimum number of operations required to convert a given string into a palindrome under certain conditions|[Python](strings/the-love-letter-mystery.py)|Easy
[Determining DNA Health](https://www.hackerrank.com/challenges/determining-dna-health)|Determine which weighted substrings in a subset of substrings can be found in a given string and calculate the string's total weight.|[Python](strings/determining-dna-health.py)|Hard
[Palindrome Index](https://www.hackerrank.com/challenges/palindrome-index)|Determine which character(s) must be removed to make a string a palindrome.|[Python](strings/palindrome-index.py)|Easy
[Anagram](https://www.hackerrank.com/challenges/anagram)|Find the minimum number of characters of the first string that we need to change in order to make it an anagram of the second string.|[Python](strings/anagram.py)|Easy
[Making Anagrams](https://www.hackerrank.com/challenges/making-anagrams)|How many characters should one delete to make two given strings anagrams of each other?|[Python](strings/making-anagrams.py)|Easy
[Game of Thrones - I](https://www.hackerrank.com/challenges/game-of-thrones)|Check whether any anagram of a string can be a palindrome or not.|[Python](strings/game-of-thrones.py)|Easy
[Two Strings](https://www.hackerrank.com/challenges/two-strings)|Given two strings, determine whether they share a common substring of non-zero length.|[Python](strings/two-strings.py)|Easy
[String Construction ](https://www.hackerrank.com/challenges/string-construction)|Find the minimum cost of copying string s.|[Python](strings/string-construction.py)|Easy
[Sherlock and the Valid String](https://www.hackerrank.com/challenges/sherlock-and-valid-string)|Remove some characters from the string such that the new string's characters have the same frequency.|[Python](strings/sherlock-and-valid-string.py)|Medium
[Highest Value Palindrome](https://www.hackerrank.com/challenges/richie-rich)|Make a number palindromic in no more than $k$ moves, maximizing its value.|[Python](strings/richie-rich.py)|Medium
[Sherlock and Anagrams](https://www.hackerrank.com/challenges/sherlock-and-anagrams)|Find the number of unordered anagramic pairs of substrings of a string.|[Python](strings/sherlock-and-anagrams.py)|Medium
[Common Child](https://www.hackerrank.com/challenges/common-child)|Given two strings a and b of equal length, what's the longest string (s) that can be constructed such that s is a child to both a and b?|[C++](strings/common-child.cpp) [Python](strings/common-child.py)|Medium
[Morgan and a String](https://www.hackerrank.com/challenges/morgan-and-a-string)|Find the lexicographically minimal string that can be formed by the combination of two strings.|[C](strings/morgan-and-a-string.c)|Expert
#### [Sorting](https://www.hackerrank.com/domains/algorithms/arrays-and-sorting)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Big Sorting](https://www.hackerrank.com/challenges/big-sorting)|Sort an array of very long numeric strings.|[Python](arrays-and-sorting/big-sorting.py)|Easy
[Intro to Tutorial Challenges](https://www.hackerrank.com/challenges/tutorial-intro)|Introduction to the Tutorial Challenges|[Python](arrays-and-sorting/tutorial-intro.py)|Easy
[Insertion Sort - Part 1](https://www.hackerrank.com/challenges/insertionsort1)|Insert an element into a sorted array.|[Python](arrays-and-sorting/insertionsort1.py)|Easy
[Insertion Sort - Part 2](https://www.hackerrank.com/challenges/insertionsort2)|Code Insertion Sort itself.|[Python](arrays-and-sorting/insertionsort2.py)|Easy
[Correctness and the Loop Invariant](https://www.hackerrank.com/challenges/correctness-invariant)|How do you demonstrate the correctness of an algorithm? You can use the loop invariant.|[Python](arrays-and-sorting/correctness-invariant.py)|Easy
[Running Time of Algorithms](https://www.hackerrank.com/challenges/runningtime)|The running time of Algorithms in general and Insertion Sort in particular.|[Python](arrays-and-sorting/runningtime.py)|Easy
[Quicksort 1 - Partition](https://www.hackerrank.com/challenges/quicksort1)|Perform the first step of Quicksort: partitioning an array.|[Python](arrays-and-sorting/quicksort1.py)|Easy
[Counting Sort 1](https://www.hackerrank.com/challenges/countingsort1)|Count the number of times each value appears.|[Python](arrays-and-sorting/countingsort1.py)|Easy
[Counting Sort 2](https://www.hackerrank.com/challenges/countingsort2)|Simple version of counting sort.|[Python](arrays-and-sorting/countingsort2.py)|Easy
[The Full Counting Sort](https://www.hackerrank.com/challenges/countingsort4)|The real counting sort.|[Python](arrays-and-sorting/countingsort4.py)|Medium
[Closest Numbers](https://www.hackerrank.com/challenges/closest-numbers)|Find the closest numbers in a list.|[Python](arrays-and-sorting/closest-numbers.py)|Easy
[Find the Median](https://www.hackerrank.com/challenges/find-the-median)|Find the Median in a list of numbers.|[Python](arrays-and-sorting/find-the-median.py)|Easy
[Insertion Sort Advanced Analysis](https://www.hackerrank.com/challenges/insertion-sort)|How many shifts will it take Insertion Sort to sort an array?|[C++](arrays-and-sorting/insertion-sort.cpp) [Python](arrays-and-sorting/insertion-sort.py)|Advanced
#### [Search](https://www.hackerrank.com/domains/algorithms/search)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Hackerland Radio Transmitters](https://www.hackerrank.com/challenges/hackerland-radio-transmitters)|Find the minimum number of radio transmitters needed to cover all the houses in Hackerland!|[Python](search/hackerland-radio-transmitters.py)|Medium
[Ice Cream Parlor](https://www.hackerrank.com/challenges/icecream-parlor)|Help Sunny and Johnny spend all their money during each trip to the Ice Cream Parlor.|[Python](search/icecream-parlor.py)|Easy
[Missing Numbers](https://www.hackerrank.com/challenges/missing-numbers)|Find the numbers missing from a sequence given a permutation of the original sequence|[Python](search/missing-numbers.py)|Easy
[Pairs](https://www.hackerrank.com/challenges/pairs)|Given N numbers, count the total pairs of numbers that have a difference of K.|[Python](search/pairs.py)|Medium
[Sherlock and Array](https://www.hackerrank.com/challenges/sherlock-and-array)|Check whether there exists an element in the array such that sum of elements on its left is equal to the sum of elements on its right.|[Python](search/sherlock-and-array.py)|Easy
#### [Greedy](https://www.hackerrank.com/domains/algorithms/greedy)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Minimum Absolute Difference in an Array](https://www.hackerrank.com/challenges/minimum-absolute-difference-in-an-array)|Given a list of integers, calculate their differences and find the difference with the smallest absolute value.|[Python](greedy/minimum-absolute-difference-in-an-array.py)|Easy
[Marc's Cakewalk](https://www.hackerrank.com/challenges/marcs-cakewalk)|Find the minimum number of miles Marc must walk to burn the calories consumed from eating cupcakes.|[Python](greedy/marcs-cakewalk.py)|Easy
[Grid Challenge](https://www.hackerrank.com/challenges/grid-challenge)|Find if it is possible to rearrange a square grid such that every row and every column is lexicographically sorted.|[Python](greedy/grid-challenge.py)|Easy
[Maximum Perimeter Triangle](https://www.hackerrank.com/challenges/maximum-perimeter-triangle)|Find the triangle having the maximum perimeter.|[C++](greedy/maximum-perimeter-triangle.cpp)|Easy
[Beautiful Pairs](https://www.hackerrank.com/challenges/beautiful-pairs)|Change an element of B and calculate the number of pairwise disjoint beautiful pairs.|[Python](greedy/beautiful-pairs.py)|Easy
[Sherlock and The Beast](https://www.hackerrank.com/challenges/sherlock-and-the-beast)|Find the largest number following some rules having N digits.|[Python](greedy/sherlock-and-the-beast.py)|Easy
#### [Dynamic Programming](https://www.hackerrank.com/domains/algorithms/dynamic-programming)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Fibonacci Modified](https://www.hackerrank.com/challenges/fibonacci-modified)|Compute the nth term of a Fibonacci sequence.|[Python](dynamic-programming/fibonacci-modified.py)|Medium
[The Longest Increasing Subsequence](https://www.hackerrank.com/challenges/longest-increasing-subsequent)|Find the length of the longest increasing subsequence in a given array.|[C++](dynamic-programming/longest-increasing-subsequent.cpp)|Advanced
[Dorsey Thief](https://www.hackerrank.com/challenges/dorsey-thief)|standard knapsack problem|[C++](dynamic-programming/dorsey-thief.cpp)|Advanced
#### [Constructive Algorithms](https://www.hackerrank.com/domains/algorithms/constructive-algorithms)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[New Year Chaos](https://www.hackerrank.com/challenges/new-year-chaos)|Determine how many bribes took place to get a queue into its current state.|[Python](constructive-algorithms/new-year-chaos.py)|Medium
#### [Bit Manipulation](https://www.hackerrank.com/domains/algorithms/bit-manipulation)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Lonely Integer](https://www.hackerrank.com/challenges/lonely-integer)|Find the unique element in an array of integer pairs.|[Python](bit-manipulation/lonely-integer.py)|Easy
[Maximizing XOR](https://www.hackerrank.com/challenges/maximizing-xor)|Given two integers, L and R, find the maximal value of A xor B, where A and B satisfy a condition.|[Python](bit-manipulation/maximizing-xor.py)|Easy
[Counter game](https://www.hackerrank.com/challenges/counter-game)|Louise and Richard play a game, find the winner of the game.|[Python](bit-manipulation/counter-game.py)|Medium
[The Great XOR](https://www.hackerrank.com/challenges/the-great-xor)|Count the non-negative integers a less than x for which the bitwise XOR of a and x is greater than x.|[C++](bit-manipulation/the-great-xor.cpp)|Medium
[Flipping bits](https://www.hackerrank.com/challenges/flipping-bits)|Flip bits in its binary representation.|[C++](bit-manipulation/flipping-bits.cpp)|Easy
[A or B](https://www.hackerrank.com/challenges/aorb)|A or B = C|[Python](bit-manipulation/aorb.py)|Medium
[Hamming Distance](https://www.hackerrank.com/challenges/hamming-distance)|You are given a string S of N lowercase Latin letters 'a' and 'b'. Process the given M queries.|[C++](bit-manipulation/hamming-distance.cpp)|Expert
#### [Game Theory](https://www.hackerrank.com/domains/algorithms/game-theory)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Game of Stones](https://www.hackerrank.com/challenges/game-of-stones-1)|A Game of Stones|[Python](game-theory/game-of-stones-1.py)|Easy
#### [Debugging](https://www.hackerrank.com/domains/algorithms/algo-debugging)
Name | Preview | Code | Difficulty
---- | ------- | ---- | ----------
[Prime Dates](https://www.hackerrank.com/challenges/prime-date)|Find the number of prime dates in a range|[C++](algo-debugging/prime-date.cpp) [Java](algo-debugging/prime-date.java) [Python](algo-debugging/prime-date.py)|Medium
[Zig Zag Sequence](https://www.hackerrank.com/challenges/zig-zag-sequence)|Find a zig zag sequence of the given array.|[C++](algo-debugging/zig-zag-sequence.cpp) [Java](algo-debugging/zig-zag-sequence.java) [Python](algo-debugging/zig-zag-sequence.py)|Medium
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
// Algorithms > Dynamic Programming > Dorsey Thief
// standard knapsack problem
//
// https://www.hackerrank.com/challenges/dorsey-thief/problem
// https://www.hackerrank.com/contests/101nov13/challenges/dorsey-thief
//
#include <bits/stdc++.h>
using namespace std;
// 0/1 knapsack where the sack must be filled to exactly W:
// dp[j] holds the best value achievable using exactly weight j (0 = unreachable).
unsigned long knapSack(unsigned long W, const vector<unsigned long>& wt, const vector<unsigned long>& val, size_t n)
{
    vector<unsigned long> dp(W + 1);
    for (size_t i = 0; i < n; ++i)
    {
        // Iterate weights downwards so each item is used at most once.
        for (unsigned long j = W; j >= wt[i]; --j)
        {
            // Only extend states that are already reachable (dp[j - wt[i]] != 0)
            // or start a new one from the empty sack (j == wt[i]).
            if (dp[j - wt[i]] != 0 || j == wt[i])
                dp[j] = max(dp[j], dp[j - wt[i]] + val[i]);
        }
    }
    return dp[W];
}

int main()
{
    std::ios::sync_with_stdio(false);

    size_t n;
    unsigned long W;
    vector<unsigned long> wt, val;

    cin >> n >> W;
    wt.resize(n);
    val.resize(n);
    for (size_t i = 0; i < n; ++i)
        cin >> val[i] >> wt[i];   // each line gives value then weight

    unsigned long r = knapSack(W, wt, val, n);
    if (r == 0)
        cout << "Got caught!" << endl;   // no subset weighs exactly W
    else
        cout << r << endl;

    return 0;
}
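// Quick hand-traced sanity check of the exact-fill DP (made-up figures, not an
// official test case). For the input
//   2 10
//   460 4
//   590 6
// the first item sets dp[4] = 460, the second then reaches dp[10] = 460 + 590 = 1050,
// so the program prints 1050; had no subset weighed exactly 10, dp[10] would stay 0
// and it would print "Got caught!".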
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |
# Algorithms > Dynamic Programming > Fibonacci Modified
# Compute the nth term of a Fibonacci sequence.
#
# https://www.hackerrank.com/challenges/fibonacci-modified/problem
# https://www.hackerrank.com/contests/back2school14/challenges/fibonacci-modified
#
# Read the first two terms and the index n of the term to compute.
t1, t2, n = map(int, input().split())
# Iterate the recurrence t(i+2) = t(i) + t(i+1)^2 up to the nth term;
# Python integers are arbitrary precision, so the squares never overflow.
for i in range(3, n + 1):
    t2, t1 = t1 + t2 ** 2, t2
print(t2)
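# Quick hand-traced check (illustrative input, not necessarily an official test case):
# for "0 1 5" the loop gives t3 = 0 + 1^2 = 1, t4 = 1 + 1^2 = 2, t5 = 1 + 2^2 = 5,
# so the program prints 5.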
| {
"repo_name": "rene-d/hackerrank",
"stars": "65",
"repo_language": "Python",
"file_name": "README.md",
"mime_type": "text/plain"
} |