I'm trying to use JenkinsPipelineUnit to test a Jenkinsfile that lives in the same git repository as my shared libraries. The Jenkinsfile references shared library code located in src. It appears that I must commit my shared library changes before I can test them, even when I use localSource as the retriever.
How can I load my shared libraries and unit test them without committing the code first?
Here is my current code that doesn't work:
def library = library().name('pipeline-utils')
        .defaultVersion("master")
        .allowOverride(true)
        .implicit(false)
        .targetPath(sharedLibs)
        .retriever(localSource(sharedLibs))
        .build()
helper.registerSharedLibrary(library)
try {
    def script = runScript("pipelines/test.groovy")
}
I get this error:
file:/Users/<myuserid>/git/pipelines/test.groovy: 2:
Error on loading library pipeline-utils@myteam/pipelineUnitTest :
Directory /Users/<myuserid>/git/out/test/classes/com/company/test/pipeline-utils@myteam/pipelineUnitTest does not exists @ line 2, column 1.
@Library("pipeline-utils@myteam/pipelineUnitTest") _
This isn't as easy as it sounds. JenkinsPipelineUnit hasn't seen much movement for about a year, while some interesting work is waiting in pull requests. Here are the steps I had to go through to get this working locally, and also on Jenkins, where the name of my repository directory can be different on each run.
1. Create a custom version of JenkinsPipelineUnit
I started from https://github.com/jenkinsci/JenkinsPipelineUnit/pull/75 but had to add some other changes. These are all the changes:
diff --git a/src/main/groovy/com/lesfurets/jenkins/unit/global/lib/LibraryConfiguration.groovy b/src/main/groovy/com/lesfurets/jenkins/unit/global/lib/LibraryConfiguration.groovy
index f4eeb17..dc13b9c 100644
--- a/src/main/groovy/com/lesfurets/jenkins/unit/global/lib/LibraryConfiguration.groovy
+++ b/src/main/groovy/com/lesfurets/jenkins/unit/global/lib/LibraryConfiguration.groovy
@@ -18,7 +18,7 @@ class LibraryConfiguration {
String targetPath
LibraryConfiguration validate() {
- if (name && defaultVersion && retriever && targetPath)
+ if (name && retriever && targetPath && ((retriever instanceof LocalSource || defaultVersion)))
return this
throw new IllegalStateException("LibraryConfiguration is not properly initialized ${this.toString()}")
}
diff --git a/src/main/groovy/com/lesfurets/jenkins/unit/global/lib/LibraryLoader.groovy b/src/main/groovy/com/lesfurets/jenkins/unit/global/lib/LibraryLoader.groovy
index 120a316..a253f2d 100644
--- a/src/main/groovy/com/lesfurets/jenkins/unit/global/lib/LibraryLoader.groovy
+++ b/src/main/groovy/com/lesfurets/jenkins/unit/global/lib/LibraryLoader.groovy
@@ -117,11 +117,14 @@
}
private static boolean matches(String libName, String version, LibraryConfiguration libraryDescription) {
+ if (libraryDescription.allowOverride) {
+ return true
+ }
if (libraryDescription.name == libName) {
if (version == null) {
return true
}
- if (libraryDescription.allowOverride || libraryDescription.defaultVersion == version) {
+ if (libraryDescription.defaultVersion == version) {
return true
}
}
diff --git a/src/main/groovy/com/lesfurets/jenkins/unit/global/lib/LocalSource.groovy b/src/main/groovy/com/lesfurets/jenkins/unit/global/lib/LocalSource.groovy
index 61babde..4edca23 100644
--- a/src/main/groovy/com/lesfurets/jenkins/unit/global/lib/LocalSource.groovy
+++ b/src/main/groovy/com/lesfurets/jenkins/unit/global/lib/LocalSource.groovy
@@ -11,7 +11,13 @@ class LocalSource implements SourceRetriever {
@Override
List<URL> retrieve(String repository, String branch, String targetPath) {
- def sourceDir = new File(sourceURL).toPath().resolve("$repository@$branch").toFile()
+ def sourceURLPath = new File(sourceURL).toPath()
+ def sourceDir
+ if (branch) {
+     sourceDir = sourceURLPath.resolve("$repository@$branch").toFile()
+ } else {
+     sourceDir = sourceURLPath.resolve(repository).toFile()
+ }
if (sourceDir.exists()) {
return [sourceDir.toURI().toURL()]
}
2. Register your current repository directory as your shared library
Inspired by: https://github.com/jimcroft/jenkinslib-example/blob/master/test/com/example/TestCase1.groovy
In your TestClass.groovy:
void setup() {
    String repositoryDirectoryName = FilenameUtils.getName(System.getProperty("user.dir"))
    String dirPath = new File(System.getProperty("user.dir"))
            .getAbsoluteFile()
            .getParentFile()
            .getAbsolutePath()
    // Bypass registerSharedLibrary() so the library can be registered even though
    // the repository directory name differs from the library name used in the pipelines
    helper.libraries.put('my-jenkins-library', library(repositoryDirectoryName)
            .allowOverride(true)
            .implicit(false)
            .targetPath(dirPath)
            .retriever(localSource(dirPath))
            .build())
}
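For completeness, here is a sketch of how that registration can sit inside a test class. It assumes a JUnit 4 style test extending JenkinsPipelineUnit's BasePipelineTest (adjust if you use Spock); the class and method names are made up, and the pipeline path comes from the question above.

import com.lesfurets.jenkins.unit.BasePipelineTest
import org.apache.commons.io.FilenameUtils
import org.junit.Before
import org.junit.Test

import static com.lesfurets.jenkins.unit.global.lib.LibraryConfiguration.library
import static com.lesfurets.jenkins.unit.global.lib.LocalSource.localSource

class TestClass extends BasePipelineTest {

    @Before
    void setup() {
        super.setUp()   // initialises `helper`
        // ...register the local library exactly as shown above...
    }

    @Test
    void runsPipelineAgainstUncommittedLibrary() {
        def script = runScript("pipelines/test.groovy")
        printCallStack()
    }
}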
I need to write a custom webpack plugin that processes newly added SVG files in the source code in watch/recompile mode. But in my plugin, compiler.modifiedFiles does not contain the path of a newly added file; it only contains the path of the folder in which the file was created. When I modify an existing file, it does contain the file path. On the compiler I found only the modifiedFiles and removedFiles APIs.
I am using webpack 5.
Please let me know how I can get the path of a newly added file in the recompile scenario.
Here is the sample code I am using:
compiler.hooks.thisCompilation.tap( PLUGIN_NAME, ( compilation ) => {
    compilation.hooks.finishModules.tapAsync( PLUGIN_NAME,
        async ( modules, callback ) => {
            const modifiedSVGFiles = this.getModifiedSVGFiles( compilation.compiler );
            // ... process modifiedSVGFiles ...
            callback();
        } );
} );
The getModifiedSVGFiles function:
getModifiedSVGFiles( compiler ) {
    const watchMode = compiler.watchMode;
    let modifiedSVGFiles = {};
    if( watchMode ) {
        // compiler.modifiedFiles is a ReadonlySet<string> in webpack 5, so both
        // arguments passed to the forEach callback are the same file path
        const modifiedFiles = compiler.modifiedFiles;
        modifiedFiles && modifiedFiles.forEach( ( key, value ) => {
            if( value.endsWith( '.svg' ) ) {
                const key = `image/${basename( value )}`;
                if( !modifiedSVGFiles[ key ] ) {
                    modifiedSVGFiles[ key ] = value;
                }
            }
        } );
    }
    return modifiedSVGFiles;
}
Thanks
Prasad
fn pre_dispatch(
    self,
    who: &Self::AccountId,
    call: &Self::Call,
    info: &DispatchInfoOf<Self::Call>,
    len: usize,
) -> Result<Self::Pre, TransactionValidityError> {
    let (_fee, imbalance) = self.withdraw_fee(who, call, info, len)?;
    Ok((self.0, who.clone(), imbalance))
}
(The above code is copied from pallet-transaction-payment.)
Here, can we get the function name and parameters from the call (one of the parameters)? Based on the function name, the pallet, and the parameters passed by the user, I want to compute the fee.
For example, if the call is pallet_staking::bond(x: amount_of_tokens_to_be_bonded), I want to set the transaction fee based on x.
Is that possible?
Likewise, I want to set the fee based on the function-call parameters entered by the user.
You can, but it requires a bit of type juggling to do so.
First, you need to realize that type Call = T::Call; in ChargeTransactionPayment. Looking at trait Config in pallet_transaction_payment, no type Call can be seen there. Instead, this type comes from frame_system::Config (which is the super-trait of every pallet's Config).
A brief look at the top-level runtime aggregator file reveals that this Call type is essentially the outer call of the runtime: an enum that encapsulates the calls of all pallets.
That being said, the main point here is that from within pallet_transaction_payment we cannot know whether this outer call contains a particular call from staking or not. To find out, you need to enforce this assumption via a new trait bound, namely IsSubType. This trait is made specifically to convert from a wrapping type (like the outer call) into its inner variants. See, for example, how this trait is implemented for node_runtime's Call type.
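To make the shape of this concrete, here is a small, self-contained toy model of the pattern in plain Rust. None of these names are Substrate's real types; it only illustrates how an IsSubType-style bound lets a fee routine peek into the outer call and read a parameter:

// Toy model (illustrative names only): the outer call is an enum with one
// variant per pallet, and is_sub_type() tries to view it as one pallet's call.
mod staking {
    pub enum Call {
        Bond { value: u64 },
        Unbond { value: u64 },
    }
}

enum OuterCall {
    Staking(staking::Call),
    // ...one variant per pallet in the runtime
}

trait IsSubType<T> {
    fn is_sub_type(&self) -> Option<&T>;
}

impl IsSubType<staking::Call> for OuterCall {
    fn is_sub_type(&self) -> Option<&staking::Call> {
        match self {
            OuterCall::Staking(call) => Some(call),
        }
    }
}

// Charge a fee that depends on the bonded amount for `bond`, flat fee otherwise.
fn custom_fee(call: &OuterCall) -> u64 {
    match call.is_sub_type() {
        Some(staking::Call::Bond { value }) => *value / 100,
        _ => 1,
    }
}

fn main() {
    let call = OuterCall::Staking(staking::Call::Bond { value: 5_000 });
    assert_eq!(custom_fee(&call), 50);
}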
Applying the following diff to Substrate master should do exactly what you want.
diff --git a/Cargo.lock b/Cargo.lock
index ea54adf99e..df66185163 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -6074,6 +6074,7 @@ dependencies = [
"frame-support",
"frame-system",
"pallet-balances",
+ "pallet-staking",
"parity-scale-codec",
"scale-info",
"serde",
diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml
index 1d3066e39f..0e705514bb 100644
--- a/frame/transaction-payment/Cargo.toml
+++ b/frame/transaction-payment/Cargo.toml
@@ -27,6 +27,7 @@ sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primit
frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" }
frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" }
+pallet-staking = { version = "4.0.0-dev", default-features = false, path = "../staking" }
[dev-dependencies]
serde_json = "1.0.68"
@@ -44,5 +45,6 @@ std = [
"sp-std/std",
"frame-support/std",
"frame-system/std",
+ "pallet-staking/std",
]
try-runtime = ["frame-support/try-runtime"]
diff --git a/frame/transaction-payment/src/lib.rs b/frame/transaction-payment/src/lib.rs
index 59d94a8237..3b0803663d 100644
--- a/frame/transaction-payment/src/lib.rs
+++ b/frame/transaction-payment/src/lib.rs
@@ -251,7 +251,7 @@ pub mod pallet {
pub struct Pallet<T>(_);
#[pallet::config]
- pub trait Config: frame_system::Config {
+ pub trait Config: frame_system::Config + pallet_staking::Config {
/// Handler for withdrawing, refunding and depositing the transaction fee.
/// Transaction fees are withdrawn before the transaction is executed.
/// After the transaction was executed the transaction weight can be
@@ -696,7 +696,8 @@ impl<T: Config> sp_std::fmt::Debug for ChargeTransactionPayment<T> {
impl<T: Config> SignedExtension for ChargeTransactionPayment<T>
where
BalanceOf<T>: Send + Sync + From<u64> + FixedPointOperand,
- T::Call: Dispatchable<Info = DispatchInfo, PostInfo = PostDispatchInfo>,
+ T::Call: Dispatchable<Info = DispatchInfo, PostInfo = PostDispatchInfo>
+ + frame_support::traits::IsSubType<pallet_staking::Call<T>>,
{
const IDENTIFIER: &'static str = "ChargeTransactionPayment";
type AccountId = T::AccountId;
@@ -736,8 +737,15 @@ where
info: &DispatchInfoOf<Self::Call>,
len: usize,
) -> Result<Self::Pre, TransactionValidityError> {
- let (_fee, imbalance) = self.withdraw_fee(who, call, info, len)?;
- Ok((self.0, who.clone(), imbalance))
+ use frame_support::traits::IsSubType;
+ if let Some(pallet_staking::Call::bond_extra { .. }) = call.is_sub_type() {
+ // skip
+ todo!()
+ } else {
+ // default impl
+ let (_fee, imbalance) = self.withdraw_fee(who, call, info, len)?;
+ Ok((self.0, who.clone(), imbalance))
+ }
}
fn post_dispatch(
Note that this approach implies that pallet_staking::Config must be present in the runtime, which is not aligned with the modularity of FRAME, and is therefore not implemented upstream. If you want this feature, as of now, the only way is to fork pallet_transaction_payment and customize it a bit for your runtime.
I am working with the X-Cart 4.4 framework.
Please help me: how can I send a clean URL in the emails that X-Cart sends?
Currently in the database the variable name is:
eml_someone_ask_question_at
and my mail content value is:
Someone asked a question about {{product_name}} at {{STOREFRONT}}/product.php?productid={{productid}}
In my mail template the line looks like:
{$lng.eml_someone_ask_question_at|substitute:"STOREFRONT":$current_location:"productid":$productid:"product_name":$product}
Now I need to switch to the clean URL before the mail is sent.
For example, the link is currently passed as the product.php?productid=... URL shown above; it needs to be changed to the product's clean URL instead.
I need to make this change before the email is sent. Can anyone please help me?
1) The call chain looks like
func_send_mail->func_display('your_mail_template')->func_clean_url_filter_output->func_clean_url_product_callback->func_clean_url_get
Add an additional parameter, like this:
func_send_mail->func_display('your_mail_template',...,$new_url)->func_clean_url_filter_output(,...,$new_url)->func_clean_url_product_callback(,...,$new_url)->func_clean_url_get(,...,$new_url)
And use the $new_url URL in func_clean_url_get instead of the original one.
The function func_display is called from the func_send_mail function; the call looks like:
$mail_message = func_display($body_template,$mail_smarty,false);
2) Another solution is simply to change the URL in the xcart_clean_urls table.
3) Another solution:
Apply the following patch:
diff -ru include/func/func.core.php include/func/func.core.php
--- include/func/func.core.php 2012-01-13 11:44:16.000000000 +0400
+++ include/func/func.core.php 2018-04-09 12:29:32.293262983 +0400
@@ -833,7 +833,7 @@
/**
* Smarty->display wrapper
*/
-function func_display($tpl, &$templater, $to_display = true, $is_intermediate = false)
+function func_display($tpl, &$templater, $to_display = true, $is_intermediate = false, $skip_output_filter = false)
{
global $config;
global $predefined_lng_variables, $override_lng_code, $shop_language, $user_agent, $__smarty_time, $__smarty_size;
@@ -1006,7 +1006,7 @@
$templater->register_outputfilter('func_postprocess_output');
if (func_constant('AREA_TYPE') == 'C') {
- if ($config['SEO']['clean_urls_enabled'] == 'Y')
+ if ($config['SEO']['clean_urls_enabled'] == 'Y' && !$skip_output_filter)
$templater->register_outputfilter('func_clean_url_filter_output');
if ($config['General']['use_cached_templates'] != 'Y')
diff -ru include/func/func.mail.php include/func/func.mail.php
--- include/func/func.mail.php 2012-01-10 16:27:54.000000000 +0400
+++ include/func/func.mail.php 2018-04-09 12:30:30.042523154 +0400
@@ -270,7 +270,8 @@
if ($config['Email']['html_mail'] != 'Y')
$mail_smarty->assign('plain_text_message', 1);
- $mail_message = func_display($body_template,$mail_smarty,false);
+ $_skip_output_filter = strpos($body_template, 'ask_question.tpl') !== false;
+ $mail_message = func_display($body_template,$mail_smarty,false, false, $_skip_output_filter);
if (X_DEF_OS_WINDOWS) {
$mail_message = preg_replace("/(?<!\r)\n/S", "\r\n", $mail_message);
And change the eml_someone_ask_question_at language variable.
Can somebody help me set this up?
On my instance (RHEL) I installed Varnish and it works well.
Then I set up varnish-devicedetect:
yum list installed | grep varnish
varnish.x86_64 3.0.5-1.16.amzn1 @amzn-main
varnish-libs.x86_64 3.0.5-1.16.amzn1 @amzn-main
varnish-release.noarch 4.0-3.el6 installed
When I try to add any of the code examples to default.vcl, Varnish fails to start. This code is OK:
include "devicedetect.vcl";
sub vcl_recv {
call devicedetect;
}
But after this Varnish fails to start:
sub vcl_backend_response {
if (bereq.http.X-UA-Device) {
if (!beresp.http.Vary) { # no Vary at all
set beresp.http.Vary = "X-UA-Device";
} elsif (beresp.http.Vary !~ "X-UA-Device") { # add to existing Vary
set beresp.http.Vary = beresp.http.Vary + ", X-UA-Device";
}
}
# comment this out if you don't want the client to know your classification
set beresp.http.X-UA-Device = bereq.http.X-UA-Device;
}
I even tried an empty one:
sub vcl_backend_response {
}
It caused the same problem.
What am I missing?
It seems I found the answer: I need to use the Varnish 3 syntax. I used this set:
sub vcl_fetch {
if (req.http.X-UA-Device) {
if (!beresp.http.Vary) { # no Vary at all
set beresp.http.Vary = "X-UA-Device";
} elseif (beresp.http.Vary !~ "X-UA-Device") { # add to existing Vary
set beresp.http.Vary = beresp.http.Vary + ", X-UA-Device";
}
}
set beresp.http.X-UA-Device = req.http.X-UA-Device;
}
sub vcl_deliver {
if ((req.http.X-UA-Device) && (resp.http.Vary)) {
set resp.http.Vary = regsub(resp.http.Vary, "X-UA-Device", "User-Agent");
}
}
Found here: https://www.varnish-cache.org/docs/3.0/tutorial/devicedetection.html
You could have found the error by trying to compile your vcl:
varnishd -C -f default.vcl (or whatever the path is to your vcl file)
This will tell you whether your VCL syntax is valid or not, i.e. whether Varnish will be able to start with it.
We needed to automate testing that all of the C#, C++, and VB.NET samples we ship compile properly. We need it to build all of them without our listing each one explicitly: if we had to list each file, then when someone forgets to add a new one (which will happen someday) the explicit calls will miss it. By walking all .sln files, we always get everything.
Doing this is pretty easy:
1. Install the samples on a clean VM (which we revert back to a snapshot for each test run).
2. Create a BuildAll.proj (MSBuild) file that calls all the installed .sln files.
3. Use MSBuild to run the generated BuildAll.proj file.
Step 2 requires a means to generate the BuildAll.proj file. Is there any way to tell MSBuild to run all .sln files under a sub-directory, or to create a BuildAll.proj that calls all the underlying .sln files?
This PowerShell script will restore all NuGet packages and build all solutions recursively in the current directory. Make sure nuget.exe and msbuild are in your PATH.
$baseDir = (Get-Item -Path ".\" -Verbose).FullName
$items = Get-ChildItem -Path $baseDir -Include *.sln -Recurse
foreach ($item in $items) {
    nuget restore $item
    msbuild $item
}
You can use the following PowerShell for .NET Core projects
$baseDir = (Get-Item -Path ".\" -Verbose).FullName
Write-Host ("Scanning *.sln files in " + $baseDir)
$solutionPaths = Get-ChildItem -Path $baseDir -Include *.sln -Recurse
Write-Host ("Total found: " + $solutionPaths.Count)
foreach ($solutionPath in $solutionPaths) {
    Write-Host ("Building => " + $solutionPath)
    dotnet build $solutionPath
}
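Since the goal is to catch samples that no longer compile, you probably also want the script to fail when any build fails. A small variation of the loop above, as a sketch (it relies only on the standard $LASTEXITCODE automatic variable):

foreach ($solutionPath in $solutionPaths) {
    Write-Host ("Building => " + $solutionPath)
    dotnet build $solutionPath
    if ($LASTEXITCODE -ne 0) {
        # dotnet build returns a non-zero exit code when compilation fails
        Write-Error ("Build failed: " + $solutionPath)
        exit 1
    }
}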
We couldn't find anything so we wrote a program that creates a BuildAll.proj that calls all .sln files under a directory. Full solution is at Windward Wrocks (my blog).
The code is:
using System;
using System.IO;
using System.Text;
using System.Xml;
using System.Xml.Linq;

namespace BuildDotNetTestScript
{
    /// <summary>
    /// Builds a test script to compile all .sln files under the directory in.
    /// </summary>
    public class Program
    {
        private static readonly XNamespace xmlns = "http://schemas.microsoft.com/developer/msbuild/2003";

        private enum VS_VER
        {
            VS_2005,
            VS_2008,
            VS_2010,
            NONE
        }

        private static VS_VER vsVersion = VS_VER.NONE;

        /// <summary>
        /// Build TestAll.proj for all .sln files in this directory and sub-directories.
        /// </summary>
        /// <param name="args">Optional: [-VS2005 | -VS2008 | -VS2010] TestAll.proj root_folder</param>
        public static void Main(string[] args)
        {
            int indexArgs = 0;
            if (args.Length >= 1 && args[0][0] == '-')
            {
                indexArgs = 1;
                switch (args[0].ToUpper().Trim())
                {
                    case "-VS2005":
                        vsVersion = VS_VER.VS_2005;
                        break;
                    case "-VS2008":
                        vsVersion = VS_VER.VS_2008;
                        break;
                    case "-VS2010":
                        vsVersion = VS_VER.VS_2010;
                        break;
                    default:
                        Console.Error.WriteLine("Only options are -VS2005, -VS2008, or -VS2010");
                        Environment.Exit(1);
                        return;
                }
            }

            string projFile = Path.GetFullPath(args.Length > indexArgs ? args[indexArgs] : "TestAll.proj");
            string rootDirectory =
                Path.GetFullPath(args.Length > indexArgs + 1 ? args[indexArgs + 1] : Directory.GetCurrentDirectory());
            Console.Out.WriteLine(string.Format("Creating project file {0}", projFile));
            Console.Out.WriteLine(string.Format("Root directory {0}", rootDirectory));

            XDocument xdoc = new XDocument();
            XElement elementProject = new XElement(xmlns + "Project");
            xdoc.Add(elementProject);
            elementProject.Add(new XAttribute("DefaultTargets", "compile"));
            elementProject.Add(new XAttribute("ToolsVersion", "3.5"));

            XElement elementPropertyGroup = new XElement(xmlns + "PropertyGroup");
            elementProject.Add(elementPropertyGroup);
            XElement elementDevEnv = new XElement(xmlns + "devenv");
            elementPropertyGroup.Add(elementDevEnv);
            elementDevEnv.Value = "devenv.exe";

            XElement elementTarget = new XElement(xmlns + "Target");
            elementProject.Add(elementTarget);
            elementTarget.Add(new XAttribute("Name", "compile"));

            // add .sln files - recursively
            AddSlnFiles(elementTarget, rootDirectory, rootDirectory);

            Console.Out.WriteLine("writing project file to disk");
            // no BOM
            using (var writer = new XmlTextWriter(projFile, new UTF8Encoding(false)))
            {
                writer.Formatting = Formatting.Indented;
                xdoc.Save(writer);
            }
            Console.Out.WriteLine("all done");
        }

        private static void AddSlnFiles(XElement elementTarget, string rootDirectory, string folder)
        {
            // add .sln files
            foreach (string fileOn in Directory.GetFiles(folder, "*.sln"))
            {
                // .../JS/... is VS2005
                bool isJSharp = fileOn.ToUpper().Replace('\\', '/').Contains("/JS/");
                bool versionMatch = true;
                switch (vsVersion)
                {
                    case VS_VER.VS_2005:
                        if ((!fileOn.ToUpper().Contains("VS2005")) && (! isJSharp))
                            versionMatch = false;
                        break;
                    case VS_VER.VS_2008:
                        if (isJSharp || !fileOn.ToUpper().Contains("VS2008"))
                            versionMatch = false;
                        break;
                    case VS_VER.VS_2010:
                        if (isJSharp || !fileOn.ToUpper().Contains("VS2010"))
                            versionMatch = false;
                        break;
                    default:
                        if (isJSharp || fileOn.ToUpper().Contains("VS2005") || fileOn.ToUpper().Contains("VS2008") || fileOn.ToUpper().Contains("VS2010"))
                            versionMatch = false;
                        break;
                }
                if (!versionMatch)
                    continue;

                string command = string.Format("\"$(devenv)\" \"{0}\" /Rebuild", Path.GetFileName(fileOn));
                XElement elementExec = new XElement(xmlns + "Exec");
                elementExec.Add(new XAttribute("Command", command));

                string workingFolder;
                if (folder.StartsWith(rootDirectory))
                {
                    workingFolder = folder.Substring(rootDirectory.Length).Trim();
                    if ((workingFolder.Length > 0) && (workingFolder[0] == Path.DirectorySeparatorChar || workingFolder[0] == Path.AltDirectorySeparatorChar))
                        workingFolder = workingFolder.Substring(1);
                }
                else
                    workingFolder = folder;
                if (workingFolder.Length > 0)
                    elementExec.Add(new XAttribute("WorkingDirectory", workingFolder));

                elementTarget.Add(elementExec);
            }

            // look in sub-directories
            foreach (string subDirectory in Directory.GetDirectories(folder))
                AddSlnFiles(elementTarget, rootDirectory, subDirectory);
        }
    }
}
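For reference, the TestAll.proj this program emits ends up looking roughly like the following; the solution name and working directory here are made-up placeholders:

<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="compile" ToolsVersion="3.5" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <PropertyGroup>
    <devenv>devenv.exe</devenv>
  </PropertyGroup>
  <Target Name="compile">
    <Exec Command="&quot;$(devenv)&quot; &quot;Sample_VS2010.sln&quot; /Rebuild" WorkingDirectory="CSharp\VS2010" />
  </Target>
</Project>

Running it is then just step 3 of the question: msbuild TestAll.proj.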