repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
Osmose/moseamp
|
musicplayer/plugins/openmptplugin/openmpt/soundlib/Load_xm.cpp
|
<reponame>Osmose/moseamp
/*
* Load_xm.cpp
* -----------
* Purpose: XM (FastTracker II) module loader / saver
* Notes : (currently none)
* Authors: <NAME>
* OpenMPT Devs
* The OpenMPT source code is released under the BSD license. Read LICENSE for more details.
*/
#include "stdafx.h"
#include "Loaders.h"
#include "../common/version.h"
#include "XMTools.h"
#ifndef MODPLUG_NO_FILESAVE
#include "../common/mptFileIO.h"
#endif
#include <algorithm>
#ifdef MODPLUG_TRACKER
#include "../mptrack/TrackerSettings.h" // For super smooth ramping option
#endif // MODPLUG_TRACKER
OPENMPT_NAMESPACE_BEGIN
// Allocate samples for an instrument
// Allocate up to numSamples sample slots for one instrument (capped at 32, the
// XM per-instrument maximum). Returns the list of slot indices actually claimed;
// the list may be shorter than requested if the sample pool is exhausted.
// Strategy when the pool is full: first reuse empty slots, then free slots held
// by samples that no instrument references any more.
static std::vector<SAMPLEINDEX> AllocateXMSamples(CSoundFile &sndFile, SAMPLEINDEX numSamples)
{
	LimitMax(numSamples, SAMPLEINDEX(32));
	std::vector<SAMPLEINDEX> foundSlots;
	foundSlots.reserve(numSamples);
	for(SAMPLEINDEX i = 0; i < numSamples; i++)
	{
		// Default candidate: append a brand-new slot after the current last sample.
		SAMPLEINDEX candidateSlot = sndFile.GetNumSamples() + 1;
		if(candidateSlot >= MAX_SAMPLES)
		{
			// If too many sample slots are needed, try to fill some empty slots first.
			for(SAMPLEINDEX j = 1; j <= sndFile.GetNumSamples(); j++)
			{
				if(sndFile.GetSample(j).HasSampleData())
				{
					continue;
				}
				if(std::find(foundSlots.begin(), foundSlots.end(), j) == foundSlots.end())
				{
					// Empty sample slot that is not occupied by the current instrument. Yay!
					candidateSlot = j;
					// Remove unused sample from instrument sample assignments
					for(INSTRUMENTINDEX ins = 1; ins <= sndFile.GetNumInstruments(); ins++)
					{
						if(sndFile.Instruments[ins] == nullptr)
						{
							continue;
						}
						for(auto &sample : sndFile.Instruments[ins]->Keyboard)
						{
							if(sample == candidateSlot)
							{
								sample = 0;
							}
						}
					}
					break;
				}
			}
		}
		if(candidateSlot >= MAX_SAMPLES)
		{
			// Still couldn't find any empty sample slots, so look out for existing but unused samples.
			std::vector<bool> usedSamples;
			SAMPLEINDEX unusedSampleCount = sndFile.DetectUnusedSamples(usedSamples);
			if(unusedSampleCount > 0)
			{
				sndFile.RemoveSelectedSamples(usedSamples);
				// Remove unused samples from instrument sample assignments
				for(INSTRUMENTINDEX ins = 1; ins <= sndFile.GetNumInstruments(); ins++)
				{
					if(sndFile.Instruments[ins] == nullptr)
					{
						continue;
					}
					for(auto &sample : sndFile.Instruments[ins]->Keyboard)
					{
						if(sample < usedSamples.size() && !usedSamples[sample])
						{
							sample = 0;
						}
					}
				}
				// New candidate slot is first unused sample slot.
				candidateSlot = static_cast<SAMPLEINDEX>(std::find(usedSamples.begin() + 1, usedSamples.end(), false) - usedSamples.begin());
			} else
			{
				// No unused sample slots: Give up :(
				break;
			}
		}
		if(candidateSlot < MAX_SAMPLES)
		{
			foundSlots.push_back(candidateSlot);
			if(candidateSlot > sndFile.GetNumSamples())
			{
				// Claimed a slot beyond the current tail: grow the global sample count.
				sndFile.m_nSamples = candidateSlot;
			}
		}
	}
	return foundSlots;
}
// Read .XM patterns
// Read .XM patterns
// Decodes all pattern chunks from the file into sndFile's pattern collection.
// Each pattern consists of a small header (size, pack method, row count, packed
// data size) followed by the packed cell data. Packing: a byte with bit 7 set is
// a flag byte announcing which of the five cell fields follow; otherwise the
// byte itself is the note and all five fields are read unpacked.
static void ReadXMPatterns(FileReader &file, const XMFileHeader &fileHeader, CSoundFile &sndFile)
{
	// Reading patterns
	sndFile.Patterns.ResizeArray(fileHeader.patterns);
	for(PATTERNINDEX pat = 0; pat < fileHeader.patterns; pat++)
	{
		FileReader::off_t curPos = file.GetPosition();
		uint32 headerSize = file.ReadUint32LE();
		file.Skip(1); // Pack method (= 0)
		ROWINDEX numRows = 64;
		if(fileHeader.version == 0x0102)
		{
			// Old format stores (rows - 1) in a single byte.
			numRows = file.ReadUint8() + 1;
		} else
		{
			numRows = file.ReadUint16LE();
		}
		// A packed size of 0 indicates a completely empty pattern.
		const uint16 packedSize = file.ReadUint16LE();
		if(numRows == 0 || numRows > MAX_PATTERN_ROWS)
		{
			// Out-of-range row counts fall back to the FT2 default of 64 rows.
			numRows = 64;
		}
		file.Seek(curPos + headerSize);
		FileReader patternChunk = file.ReadChunk(packedSize);
		if(!sndFile.Patterns.Insert(pat, numRows) || packedSize == 0)
		{
			continue;
		}
		enum PatternFlags
		{
			isPackByte = 0x80,
			allFlags = 0xFF,
			notePresent = 0x01,
			instrPresent = 0x02,
			volPresent = 0x04,
			commandPresent = 0x08,
			paramPresent = 0x10,
		};
		for(auto &m : sndFile.Patterns[pat])
		{
			uint8 info = patternChunk.ReadUint8();
			uint8 vol = 0;
			if(info & isPackByte)
			{
				// Interpret byte as flag set.
				if(info & notePresent) m.note = patternChunk.ReadUint8();
			} else
			{
				// Interpret byte as note, read all other pattern fields as well.
				m.note = info;
				info = allFlags;
			}
			if(info & instrPresent) m.instr = patternChunk.ReadUint8();
			if(info & volPresent) vol = patternChunk.ReadUint8();
			if(info & commandPresent) m.command = patternChunk.ReadUint8();
			if(info & paramPresent) m.param = patternChunk.ReadUint8();
			// Note translation: 97 = key off, 1..96 = real notes (shifted up one
			// octave into MPT's note range), everything else = no note.
			if(m.note == 97)
			{
				m.note = NOTE_KEYOFF;
			} else if(m.note > 0 && m.note < 97)
			{
				m.note += 12;
			} else
			{
				m.note = NOTE_NONE;
			}
			// Bitwise OR on purpose: true if either command or param is non-zero.
			if(m.command | m.param)
			{
				CSoundFile::ConvertModCommand(m);
			} else
			{
				m.command = CMD_NONE;
			}
			if(m.instr == 0xFF)
			{
				m.instr = 0;
			}
			// Volume column: 0x10-0x50 is plain volume, 0x60+ are effects.
			if(vol >= 0x10 && vol <= 0x50)
			{
				m.volcmd = VOLCMD_VOLUME;
				m.vol = vol - 0x10;
			} else if (vol >= 0x60)
			{
				// Volume commands 6-F translation.
				static const ModCommand::VOLCMD volEffTrans[] =
				{
					VOLCMD_VOLSLIDEDOWN, VOLCMD_VOLSLIDEUP, VOLCMD_FINEVOLDOWN, VOLCMD_FINEVOLUP,
					VOLCMD_VIBRATOSPEED, VOLCMD_VIBRATODEPTH, VOLCMD_PANNING, VOLCMD_PANSLIDELEFT,
					VOLCMD_PANSLIDERIGHT, VOLCMD_TONEPORTAMENTO,
				};
				m.volcmd = volEffTrans[(vol - 0x60) >> 4];
				m.vol = vol & 0x0F;
				if(m.volcmd == VOLCMD_PANNING)
				{
					m.vol *= 4; // FT2 does indeed not scale panning symmetrically.
				}
			}
		}
	}
}
// Bit flags used while heuristically identifying the tracker that wrote the
// file. Multiple flags may be set simultaneously until the detection code in
// ReadXM narrows the candidates down.
enum TrackerVersions
{
	verUnknown = 0x00,		// Probably not made with MPT
	verOldModPlug = 0x01,	// Made with MPT Alpha / Beta
	verNewModPlug = 0x02,	// Made with MPT (not Alpha / Beta)
	verModPlug1_09 = 0x04,	// Made with MPT 1.09 or possibly other version
	verOpenMPT = 0x08,		// Made with OpenMPT
	verConfirmed = 0x10,	// We are very sure that we found the correct tracker version.
	verFT2Generic = 0x20,	// "FastTracker v2.00", but FastTracker has NOT been ruled out
	verOther = 0x40,		// Something we don't know, testing for DigiTrakker.
	verFT2Clone = 0x80,		// NOT FT2: itype changed between instruments, or \0 found in song title
	verDigiTrakker = 0x100,	// Probably DigiTrakker
	verUNMO3 = 0x200,		// TODO: UNMO3-ed XMs are detected as MPT 1.16
	verEmptyOrders = 0x400,	// Allow empty order list like in OpenMPT (FT2 just plays pattern 0 if the order list is empty according to the header)
};
DECLARE_FLAGSET(TrackerVersions)
// Check the bare minimum of XM header sanity: a non-zero channel count within
// the supported range and the mandatory "Extended Module: " signature.
static bool ValidateHeader(const XMFileHeader &fileHeader)
{
	const bool channelCountValid = (fileHeader.channels != 0) && (fileHeader.channels <= MAX_BASECHANNELS);
	const bool signatureValid = (std::memcmp(fileHeader.signature, "Extended Module: ", 17) == 0);
	return channelCountValid && signatureValid;
}
// Smallest number of bytes that must follow the file header for the file to be
// plausible: one byte per order list entry, plus at least a 4-byte length field
// for each pattern and each instrument.
static uint64 GetHeaderMinimumAdditionalSize(const XMFileHeader &fileHeader)
{
	const uint64 orderListBytes = fileHeader.orders;
	const uint64 chunkLengthBytes = 4 * (static_cast<uint64>(fileHeader.patterns) + fileHeader.instruments);
	return orderListBytes + chunkLengthBytes;
}
// Lightweight probe for the format auto-detection framework: decide from the
// first buffered bytes whether this could be an XM file, without loading it.
CSoundFile::ProbeResult CSoundFile::ProbeFileHeaderXM(MemoryFileReader file, const uint64 *pfilesize)
{
	XMFileHeader fileHeader;
	if(!file.ReadStruct(fileHeader))
	{
		// Not enough data buffered yet to make a decision.
		return ProbeWantMoreData;
	}
	if(!ValidateHeader(fileHeader))
	{
		return ProbeFailure;
	}
	// Header looks fine; verify that the file is big enough to hold the rest.
	return ProbeAdditionalSize(file, pfilesize, GetHeaderMinimumAdditionalSize(fileHeader));
}
// Load a complete XM module.
// Returns false on an invalid header or truncated file; returns true early if
// loadFlags == onlyVerifyHeader. Alongside reading orders, patterns,
// instruments and samples, this performs extensive heuristics to identify the
// saving tracker, since several playback quirks depend on it.
bool CSoundFile::ReadXM(FileReader &file, ModLoadingFlags loadFlags)
{
	file.Rewind();
	XMFileHeader fileHeader;
	if(!file.ReadStruct(fileHeader))
	{
		return false;
	}
	if(!ValidateHeader(fileHeader))
	{
		return false;
	}
	if(!file.CanRead(mpt::saturate_cast<FileReader::off_t>(GetHeaderMinimumAdditionalSize(fileHeader))))
	{
		return false;
	} else if(loadFlags == onlyVerifyHeader)
	{
		return true;
	}
	InitializeGlobals(MOD_TYPE_XM);
	InitializeChannels();
	m_nMixLevels = mixLevelsCompatible;

	// --- Tracker identification, round 1: the tracker name string ---
	FlagSet<TrackerVersions> madeWith(verUnknown);
	mpt::ustring madeWithTracker;
	if(!memcmp(fileHeader.trackerName, "FastTracker ", 12))
	{
		// NOTE(review): the 8-byte literals compared below look shorter than 8
		// characters; the original source space-pads them ("v2.00   " etc.) and
		// the padding appears to have been lost in transit — confirm against
		// upstream Load_xm.cpp before relying on these comparisons.
		if(fileHeader.size == 276 && !memcmp(fileHeader.trackerName + 12, "v2.00 ", 8))
		{
			if(fileHeader.version < 0x0104)
				madeWith = verFT2Generic | verConfirmed;
			else if(memchr(fileHeader.songName, '\0', 20) != nullptr)
				// FT2 pads the song title with spaces, some other trackers use null chars
				madeWith = verFT2Clone | verNewModPlug | verEmptyOrders;
			else
				madeWith = verFT2Generic | verNewModPlug;
		} else if(!memcmp(fileHeader.trackerName + 12, "v 2.00 ", 8))
		{
			// MPT 1.0 (exact version to be determined later)
			madeWith = verOldModPlug;
		} else
		{
			// ???
			madeWith.set(verConfirmed);
			madeWithTracker = U_("FastTracker Clone");
		}
	} else
	{
		// Something else!
		madeWith = verUnknown | verConfirmed;
		mpt::String::Read<mpt::String::spacePadded>(madeWithTracker, mpt::CharsetCP437, fileHeader.trackerName);
		if(!memcmp(fileHeader.trackerName, "OpenMPT ", 8))
		{
			madeWith = verOpenMPT | verConfirmed | verEmptyOrders;
		} else if(!memcmp(fileHeader.trackerName, "MilkyTracker ", 12))
		{
			// MilkyTracker prior to version 0.90.87 doesn't set a version string.
			// Luckily, starting with v0.90.87, MilkyTracker also implements the FT2 panning scheme.
			if(memcmp(fileHeader.trackerName + 12, " ", 8))
			{
				m_nMixLevels = mixLevelsCompatibleFT2;
			}
		} else if(!memcmp(fileHeader.trackerName, "MadTracker 2.0\0", 15))
		{
			// Fix channel 2 in m3_cha.xm
			m_playBehaviour.reset(kFT2PortaNoNote);
			// Fix arpeggios in kragle_-_happy_day.xm
			m_playBehaviour.reset(kFT2Arpeggio);
		} else if(!memcmp(fileHeader.trackerName, "Skale Tracker\0", 14))
		{
			m_playBehaviour.reset(kFT2OffsetOutOfRange);
		} else if(!memcmp(fileHeader.trackerName, "*Converted ", 11))
		{
			madeWith = verDigiTrakker;
		}
	}

	// --- Global song settings from the header ---
	mpt::String::Read<mpt::String::spacePadded>(m_songName, fileHeader.songName);
	m_nMinPeriod = 1;
	m_nMaxPeriod = 31999;
	Order().SetRestartPos(fileHeader.restartPos);
	m_nChannels = fileHeader.channels;
	m_nInstruments = std::min<uint16>(fileHeader.instruments, MAX_INSTRUMENTS - 1u);
	if(fileHeader.speed)
		m_nDefaultSpeed = fileHeader.speed;
	if(fileHeader.tempo)
		m_nDefaultTempo.Set(Clamp<uint16, uint16>(fileHeader.tempo, 32, 512));
	m_SongFlags.reset();
	m_SongFlags.set(SONG_LINEARSLIDES, (fileHeader.flags & XMFileHeader::linearSlides) != 0);
	m_SongFlags.set(SONG_EXFILTERRANGE, (fileHeader.flags & XMFileHeader::extendedFilterRange) != 0);
	if(m_SongFlags[SONG_EXFILTERRANGE] && madeWith == (verFT2Generic | verNewModPlug))
	{
		// The extended filter range flag is MPT-specific, so real FT2 is ruled out.
		madeWith = verFT2Clone | verNewModPlug | verConfirmed;
	}
	ReadOrderFromFile<uint8>(Order(), file, fileHeader.orders);
	if(fileHeader.orders == 0 && !madeWith[verEmptyOrders])
	{
		// Fix lamb_-_dark_lighthouse.xm, which only contains one pattern and an empty order list
		Order().assign(1, 0);
	}
	file.Seek(fileHeader.size + 60);
	if(fileHeader.version >= 0x0104)
	{
		ReadXMPatterns(file, fileHeader, *this);
	}

	// In case of XM versions < 1.04, we need to memorize the sample flags for all samples, as they are not stored immediately after the sample headers.
	std::vector<SampleIO> sampleFlags;
	uint8 sampleReserved = 0;
	int instrType = -1;	// -1 = not seen yet; FT2 writes a constant junk value here.

	// Reading instruments
	for(INSTRUMENTINDEX instr = 1; instr <= m_nInstruments; instr++)
	{
		// First, try to read instrument header length...
		uint32 headerSize = file.ReadUint32LE();
		if(headerSize == 0)
		{
			headerSize = sizeof(XMInstrumentHeader);
		}
		// Now, read the complete struct.
		file.SkipBack(4);
		XMInstrumentHeader instrHeader;
		file.ReadStructPartial(instrHeader, headerSize);

		// Time for some version detection stuff.
		if(madeWith == verOldModPlug)
		{
			madeWith.set(verConfirmed);
			if(instrHeader.size == 245)
			{
				// ModPlug Tracker Alpha
				m_dwLastSavedWithVersion = MAKE_VERSION_NUMERIC(1, 00, 00, A5);
				madeWithTracker = U_("ModPlug Tracker 1.0 alpha");
			} else if(instrHeader.size == 263)
			{
				// ModPlug Tracker Beta (Beta 1 still behaves like Alpha, but Beta 3.3 does it this way)
				m_dwLastSavedWithVersion = MAKE_VERSION_NUMERIC(1, 00, 00, B3);
				madeWithTracker = U_("ModPlug Tracker 1.0 beta");
			} else
			{
				// WTF?
				madeWith = (verUnknown | verConfirmed);
			}
		} else if(instrHeader.numSamples == 0)
		{
			// Empty instruments make tracker identification pretty easy!
			if(instrHeader.size == 263 && instrHeader.sampleHeaderSize == 0 && madeWith[verNewModPlug])
				madeWith.set(verConfirmed);
			else if(instrHeader.size != 29 && madeWith[verDigiTrakker])
				madeWith.reset(verDigiTrakker);
			else if(madeWith[verFT2Clone | verFT2Generic] && instrHeader.size != 33)
			{
				// Sure isn't FT2.
				// Note: FT2 NORMALLY writes shdr=40 for all samples, but sometimes it
				// just happens to write random garbage there instead. Surprise!
				// Note: 4-mat's eternity.xm has an instrument header size of 29.
				madeWith = verUnknown;
			}
		}

		if(AllocateInstrument(instr) == nullptr)
		{
			continue;
		}
		instrHeader.ConvertToMPT(*Instruments[instr]);

		if(instrType == -1)
		{
			instrType = instrHeader.type;
		} else if(instrType != instrHeader.type && madeWith[verFT2Generic])
		{
			// FT2 writes some random junk for the instrument type field,
			// but it's always the SAME junk for every instrument saved.
			madeWith.reset(verFT2Generic);
			madeWith.set(verFT2Clone);
		}

		if(instrHeader.numSamples > 0)
		{
			// Yep, there are some samples associated with this instrument.
			if((instrHeader.instrument.midiEnabled | instrHeader.instrument.midiChannel | instrHeader.instrument.midiProgram | instrHeader.instrument.muteComputer) != 0)
			{
				// Definitely not an old MPT.
				madeWith.reset(verOldModPlug | verNewModPlug);
			}

			// Read sample headers
			std::vector<SAMPLEINDEX> sampleSlots = AllocateXMSamples(*this, instrHeader.numSamples);

			// Update sample assignment map
			// (keys 12..107: the XM keyboard covers 96 notes, shifted up one octave)
			for(size_t k = 0 + 12; k < 96 + 12; k++)
			{
				if(Instruments[instr]->Keyboard[k] < sampleSlots.size())
				{
					Instruments[instr]->Keyboard[k] = sampleSlots[Instruments[instr]->Keyboard[k]];
				}
			}

			if(fileHeader.version >= 0x0104)
			{
				// v1.04+: sample data follows the headers directly, so the flags
				// only need to live until the end of this instrument.
				sampleFlags.clear();
			}
			// Need to memorize those if we're going to skip any samples...
			std::vector<uint32> sampleSize(instrHeader.numSamples);

			// Early versions of Sk@le Tracker set instrHeader.sampleHeaderSize = 0 (IFULOVE.XM)
			// cybernostra weekend has instrHeader.sampleHeaderSize = 0x12, which would leave out the sample name, but FT2 still reads the name.
			MPT_ASSERT(instrHeader.sampleHeaderSize == 0 || instrHeader.sampleHeaderSize == sizeof(XMSample));

			for(SAMPLEINDEX sample = 0; sample < instrHeader.numSamples; sample++)
			{
				XMSample sampleHeader;
				file.ReadStruct(sampleHeader);

				sampleFlags.push_back(sampleHeader.GetSampleFormat());
				sampleSize[sample] = sampleHeader.length;
				sampleReserved |= sampleHeader.reserved;

				if(sample < sampleSlots.size())
				{
					SAMPLEINDEX mptSample = sampleSlots[sample];

					sampleHeader.ConvertToMPT(Samples[mptSample]);
					instrHeader.instrument.ApplyAutoVibratoToMPT(Samples[mptSample]);

					mpt::String::Read<mpt::String::spacePadded>(m_szNames[mptSample], sampleHeader.name);

					if((sampleHeader.flags & 3) == 3 && madeWith[verNewModPlug])
					{
						// MPT 1.09 and maybe newer / older versions set both loop flags for bidi loops.
						madeWith.set(verModPlug1_09);
					}
				}
			}

			// Read samples
			if(fileHeader.version >= 0x0104)
			{
				for(SAMPLEINDEX sample = 0; sample < instrHeader.numSamples; sample++)
				{
					// Sample 15 in dirtysex.xm by J/M/T/M is a 16-bit sample with an odd size of 0x18B according to the header, while the real sample size would be 0x18A.
					// Always read as many bytes as specified in the header, even if the sample reader would probably read less bytes.
					FileReader sampleChunk = file.ReadChunk(sampleFlags[sample].GetEncoding() != SampleIO::ADPCM ? sampleSize[sample] : (16 + (sampleSize[sample] + 1) / 2));
					if(sample < sampleSlots.size() && (loadFlags & loadSampleData))
					{
						sampleFlags[sample].ReadSample(Samples[sampleSlots[sample]], sampleChunk);
					}
				}
			}
		}
	}

	if(sampleReserved == 0 && madeWith[verNewModPlug] && memchr(fileHeader.songName, '\0', sizeof(fileHeader.songName)) != nullptr)
	{
		// Null-terminated song name: Quite possibly MPT. (could really be an MPT-made file resaved in FT2, though)
		madeWith.set(verConfirmed);
	}

	if(fileHeader.version < 0x0104)
	{
		// Load Patterns and Samples (Version 1.02 and 1.03)
		if(loadFlags & (loadPatternData | loadSampleData))
		{
			ReadXMPatterns(file, fileHeader, *this);
		}

		if(loadFlags & loadSampleData)
		{
			for(SAMPLEINDEX sample = 1; sample <= GetNumSamples(); sample++)
			{
				sampleFlags[sample - 1].ReadSample(Samples[sample], file);
			}
		}
	}

	// --- Optional trailing chunks (MPT extensions) ---

	// Read song comments: "text"
	if(file.ReadMagic("text"))
	{
		m_songMessage.Read(file, file.ReadUint32LE(), SongMessage::leCR);
		madeWith.set(verConfirmed);
	}

	// Read midi config: "MIDI"
	bool hasMidiConfig = false;
	if(file.ReadMagic("MIDI"))
	{
		file.ReadStructPartial<MIDIMacroConfigData>(m_MidiCfg, file.ReadUint32LE());
		m_MidiCfg.Sanitize();
		hasMidiConfig = true;
		madeWith.set(verConfirmed);
	}

	// Read pattern names: "PNAM"
	if(file.ReadMagic("PNAM"))
	{
		const PATTERNINDEX namedPats = std::min(static_cast<PATTERNINDEX>(file.ReadUint32LE() / MAX_PATTERNNAME), Patterns.Size());
		for(PATTERNINDEX pat = 0; pat < namedPats; pat++)
		{
			char patName[MAX_PATTERNNAME];
			file.ReadString<mpt::String::maybeNullTerminated>(patName, MAX_PATTERNNAME);
			Patterns[pat].SetName(patName);
		}
		madeWith.set(verConfirmed);
	}

	// Read channel names: "CNAM"
	if(file.ReadMagic("CNAM"))
	{
		const CHANNELINDEX namedChans = std::min(static_cast<CHANNELINDEX>(file.ReadUint32LE() / MAX_CHANNELNAME), GetNumChannels());
		for(CHANNELINDEX chn = 0; chn < namedChans; chn++)
		{
			file.ReadString<mpt::String::maybeNullTerminated>(ChnSettings[chn].szName, MAX_CHANNELNAME);
		}
		madeWith.set(verConfirmed);
	}

	// Read mix plugins information
	if(file.CanRead(8))
	{
		FileReader::off_t oldPos = file.GetPosition();
		LoadMixPlugins(file);
		if(file.GetPosition() != oldPos)
		{
			madeWith.set(verConfirmed);
		}
	}

	// --- Tracker identification, round 2: map confirmed flags to versions ---
	if(madeWith[verConfirmed])
	{
		if(madeWith[verModPlug1_09])
		{
			m_dwLastSavedWithVersion = MAKE_VERSION_NUMERIC(1, 09, 00, 00);
			madeWithTracker = U_("ModPlug Tracker 1.09");
		} else if(madeWith[verNewModPlug])
		{
			m_dwLastSavedWithVersion = MAKE_VERSION_NUMERIC(1, 16, 00, 00);
			madeWithTracker = U_("ModPlug Tracker 1.10 - 1.16");
		}
	}

	if(!memcmp(fileHeader.trackerName, "OpenMPT ", 8))
	{
		// Hey, I know this tracker!
		std::string mptVersion(fileHeader.trackerName + 8, 12);
		m_dwLastSavedWithVersion = Version::Parse(mpt::ToUnicode(mpt::CharsetASCII, mptVersion));
		madeWith = verOpenMPT | verConfirmed;

		if(m_dwLastSavedWithVersion < MAKE_VERSION_NUMERIC(1, 22, 07, 19))
			m_nMixLevels = mixLevelsCompatible;
		else
			m_nMixLevels = mixLevelsCompatibleFT2;
	}

	if(m_dwLastSavedWithVersion && !madeWith[verOpenMPT])
	{
		// Old MPT files expect the original (non-compatible) mixing behaviour.
		m_nMixLevels = mixLevelsOriginal;
		m_playBehaviour.reset();
	}

	if(madeWith[verFT2Generic])
	{
		m_nMixLevels = mixLevelsCompatibleFT2;

		if(!hasMidiConfig)
		{
			// FT2 allows typing in arbitrary unsupported effect letters such as Zxx.
			// Prevent these commands from being interpreted as filter commands by erasing the default MIDI Config.
			m_MidiCfg.ClearZxxMacros();
		}

		if(fileHeader.version >= 0x0104	// Old versions of FT2 didn't have (smooth) ramping. Disable it for those versions where we can be sure that there should be no ramping.
#ifdef MODPLUG_TRACKER
			&& TrackerSettings::Instance().autoApplySmoothFT2Ramping
#endif // MODPLUG_TRACKER
			)
		{
			// apply FT2-style super-soft volume ramping
			m_playBehaviour.set(kFT2VolumeRamping);
		}
	}

	if(madeWithTracker.empty())
	{
		// NOTE(review): "(instrType ? instrType : -1) == -1" is true only when
		// instrType is 0 or was never set (-1) — confirm against upstream
		// whether this was meant to be "instrType <= 0" style logic.
		if(madeWith[verDigiTrakker] && sampleReserved == 0 && (instrType ? instrType : -1) == -1)
		{
			madeWithTracker = U_("DigiTrakker");
		} else if(madeWith[verFT2Generic])
		{
			madeWithTracker = U_("FastTracker 2 or compatible");
		} else
		{
			madeWithTracker = U_("Unknown");
		}
	}

	// Read OpenMPT-specific extensions appended after all standard chunks.
	bool isOpenMPTMade = false; // specific for OpenMPT 1.17+
	if(GetNumInstruments())
	{
		isOpenMPTMade = LoadExtendedInstrumentProperties(file);
	}

	LoadExtendedSongProperties(file, true, &isOpenMPTMade);

	if(isOpenMPTMade && m_dwLastSavedWithVersion < MAKE_VERSION_NUMERIC(1, 17, 00, 00))
	{
		// Up to OpenMPT 1.17.02.45 (r165), it was possible that the "last saved with" field was 0
		// when saving a file in OpenMPT for the first time.
		m_dwLastSavedWithVersion = MAKE_VERSION_NUMERIC(1, 17, 00, 00);
	}

	if(m_dwLastSavedWithVersion >= MAKE_VERSION_NUMERIC(1, 17, 00, 00))
	{
		madeWithTracker = U_("OpenMPT ") + m_dwLastSavedWithVersion.ToUString();
	}

	// We no longer allow any --- or +++ items in the order list now.
	if(m_dwLastSavedWithVersion && m_dwLastSavedWithVersion < MAKE_VERSION_NUMERIC(1, 22, 02, 02))
	{
		if(!Patterns.IsValidPat(0xFE))
			Order().RemovePattern(0xFE);
		if(!Patterns.IsValidPat(0xFF))
			Order().Replace(0xFF, Order.GetInvalidPatIndex());
	}

	m_modFormat.formatName = mpt::format(U_("FastTracker 2 v%1.%2"))(fileHeader.version >> 8, mpt::ufmt::hex0<2>(fileHeader.version & 0xFF));
	m_modFormat.type = U_("xm");
	m_modFormat.madeWithTracker = std::move(madeWithTracker);
	m_modFormat.charset = m_dwLastSavedWithVersion ? mpt::CharsetWindows1252 : mpt::CharsetCP437;

	return true;
}
#ifndef MODPLUG_NO_FILESAVE
// Log message fragments used below when pattern data exceeds format limits.
#define str_tooMuchPatternData ("Warning: File format limit was reached. Some pattern data may not get written to file.")
#define str_pattern ("pattern")

// Save the current module as an XM v1.04 file.
// compatibilityExport restricts the output to what real FastTracker 2 can
// load (max 32 channels, 256 orders, 16 samples per instrument, no MPT-only
// chunks). Always returns true.
bool CSoundFile::SaveXM(std::ostream &f, bool compatibilityExport)
{
	bool addChannel = false; // avoid odd channel count for FT2 compatibility

	// --- File header ---
	XMFileHeader fileHeader;
	MemsetZero(fileHeader);

	memcpy(fileHeader.signature, "Extended Module: ", 17);
	mpt::String::Write<mpt::String::spacePadded>(fileHeader.songName, m_songName);
	fileHeader.eof = 0x1A;
	const std::string openMptTrackerName = mpt::ToCharset(GetCharsetFile(), Version::Current().GetOpenMPTVersionString());
	mpt::String::Write<mpt::String::spacePadded>(fileHeader.trackerName, openMptTrackerName);

	// Writing song header
	fileHeader.version = 0x0104;					// XM Format v1.04
	fileHeader.size = sizeof(XMFileHeader) - 60;	// minus everything before this field
	fileHeader.restartPos = Order().GetRestartPos();

	fileHeader.channels = m_nChannels;
	if((m_nChannels % 2u) && m_nChannels < 32)
	{
		// Avoid odd channel count for FT2 compatibility
		fileHeader.channels++;
		addChannel = true;
	} else if(compatibilityExport && fileHeader.channels > 32)
	{
		fileHeader.channels = 32;
	}

	// Find out number of orders and patterns used.
	// +++ and --- patterns are not taken into consideration as FastTracker does not support them.
	const ORDERINDEX trimmedLength = Order().GetLengthTailTrimmed();
	std::vector<uint8> orderList(trimmedLength);
	const ORDERINDEX orderLimit = compatibilityExport ? 256 : uint16_max;
	ORDERINDEX numOrders = 0;
	PATTERNINDEX numPatterns = Patterns.GetNumPatterns();
	bool changeOrderList = false;
	for(ORDERINDEX ord = 0; ord < trimmedLength; ord++)
	{
		PATTERNINDEX pat = Order()[ord];
		if(pat == Order.GetIgnoreIndex() || pat == Order.GetInvalidPatIndex() || pat > uint8_max)
		{
			// Skip/stop markers and out-of-range patterns cannot be represented.
			changeOrderList = true;
		} else if(numOrders < orderLimit)
		{
			orderList[numOrders++] = static_cast<uint8>(pat);
			if(pat >= numPatterns)
				numPatterns = pat + 1;
		}
	}
	if(changeOrderList)
	{
		AddToLog("Skip and stop order list items (+++ and ---) are not saved in XM files.");
	}
	// FT2 expects a fixed-size 256-entry order list in compatible files.
	orderList.resize(compatibilityExport ? 256 : numOrders);

	fileHeader.orders = numOrders;
	fileHeader.patterns = numPatterns;
	fileHeader.size += static_cast<uint32>(orderList.size());

	uint16 writeInstruments;
	if(m_nInstruments > 0)
		fileHeader.instruments = writeInstruments = m_nInstruments;
	else
		fileHeader.instruments = writeInstruments = m_nSamples;

	if(m_SongFlags[SONG_LINEARSLIDES]) fileHeader.flags |= XMFileHeader::linearSlides;
	if(m_SongFlags[SONG_EXFILTERRANGE] && !compatibilityExport) fileHeader.flags |= XMFileHeader::extendedFilterRange;
	// NOTE(review): self-assignment below is a no-op — looks like a leftover;
	// confirm against upstream whether an endianness conversion was intended.
	fileHeader.flags = fileHeader.flags;

	// Fasttracker 2 will happily accept any tempo faster than 255 BPM. XMPlay does also support this, great!
	fileHeader.tempo = mpt::saturate_cast<uint16>(m_nDefaultTempo.GetInt());
	fileHeader.speed = static_cast<uint16>(Clamp(m_nDefaultSpeed, 1u, 31u));

	mpt::IO::Write(f, fileHeader);

	// Write processed order list
	mpt::IO::WriteRaw(f, orderList.data(), orderList.size());

	// Writing patterns

#define ASSERT_CAN_WRITE(x) \
	if(len > s.size() - x) /*Buffer running out? Make it larger.*/ \
	s.resize(s.size() + 10 * 1024, 0);

	std::vector<uint8> s(64 * 64 * 5, 0);

	for(PATTERNINDEX pat = 0; pat < numPatterns; pat++)
	{
		// 9-byte pattern header: length, pack type, row count (LE16), data size (LE16).
		uint8 patHead[9] = { 0 };
		patHead[0] = 9;

		if(!Patterns.IsValidPat(pat))
		{
			// There's nothing to write... chicken out.
			patHead[5] = 64;
			mpt::IO::Write(f, patHead);
			continue;
		}

		const uint16 numRows = mpt::saturate_cast<uint16>(Patterns[pat].GetNumRows());
		patHead[5] = static_cast<uint8>(numRows & 0xFF);
		patHead[6] = static_cast<uint8>(numRows >> 8);

		auto p = Patterns[pat].cbegin();
		size_t len = 0;
		// Empty patterns are always loaded as 64-row patterns in FT2, regardless of their real size...
		bool emptyPattern = true;

		for(size_t j = m_nChannels * numRows; j > 0; j--, p++)
		{
			// Don't write more than 32 channels
			if(compatibilityExport && m_nChannels - ((j - 1) % m_nChannels) > 32) continue;

			uint8 note = p->note;
			uint8 command = p->command, param = p->param;
			ModSaveCommand(command, param, true, compatibilityExport);

			// Map MPT notes back to the XM range (1..96, 97 = key off, 0 = none).
			if (note >= NOTE_MIN_SPECIAL) note = 97; else
			if ((note <= 12) || (note > 96+12)) note = 0; else
			note -= 12;

			// Translate the volume column back to XM encoding.
			uint8 vol = 0;
			if (p->volcmd != VOLCMD_NONE)
			{
				switch(p->volcmd)
				{
				case VOLCMD_VOLUME:			vol = 0x10 + p->vol; break;
				case VOLCMD_VOLSLIDEDOWN:	vol = 0x60 + (p->vol & 0x0F); break;
				case VOLCMD_VOLSLIDEUP:		vol = 0x70 + (p->vol & 0x0F); break;
				case VOLCMD_FINEVOLDOWN:	vol = 0x80 + (p->vol & 0x0F); break;
				case VOLCMD_FINEVOLUP:		vol = 0x90 + (p->vol & 0x0F); break;
				case VOLCMD_VIBRATOSPEED:	vol = 0xA0 + (p->vol & 0x0F); break;
				case VOLCMD_VIBRATODEPTH:	vol = 0xB0 + (p->vol & 0x0F); break;
				case VOLCMD_PANNING:		vol = 0xC0 + (p->vol / 4); if (vol > 0xCF) vol = 0xCF; break;
				case VOLCMD_PANSLIDELEFT:	vol = 0xD0 + (p->vol & 0x0F); break;
				case VOLCMD_PANSLIDERIGHT:	vol = 0xE0 + (p->vol & 0x0F); break;
				case VOLCMD_TONEPORTAMENTO:	vol = 0xF0 + (p->vol & 0x0F); break;
				}
				// Those values are ignored in FT2. Don't save them, also to avoid possible problems with other trackers (or MPT itself)
				if(compatibilityExport && p->vol == 0)
				{
					switch(p->volcmd)
					{
					case VOLCMD_VOLUME:
					case VOLCMD_PANNING:
					case VOLCMD_VIBRATODEPTH:
					case VOLCMD_TONEPORTAMENTO:
					case VOLCMD_PANSLIDELEFT:	// Doesn't have memory, but does weird things with zero param.
						break;
					default:
						// no memory here.
						vol = 0;
					}
				}
			}

			// no need to fix non-empty patterns
			if(!p->IsEmpty())
				emptyPattern = false;

			// Apparently, completely empty patterns are loaded as empty 64-row patterns in FT2, regardless of their original size.
			// We have to avoid this, so we add a "break to row 0" command in the last row.
			if(j == 1 && emptyPattern && numRows != 64)
			{
				command = 0x0D;
				param = 0;
			}

			// All five fields present: write them unpacked; otherwise emit a flag byte.
			if ((note) && (p->instr) && (vol > 0x0F) && (command) && (param))
			{
				s[len++] = note;
				s[len++] = p->instr;
				s[len++] = vol;
				s[len++] = command;
				s[len++] = param;
			} else
			{
				uint8 b = 0x80;
				if (note) b |= 0x01;
				if (p->instr) b |= 0x02;
				if (vol >= 0x10) b |= 0x04;
				if (command) b |= 0x08;
				if (param) b |= 0x10;
				s[len++] = b;
				if (b & 1) s[len++] = note;
				if (b & 2) s[len++] = p->instr;
				if (b & 4) s[len++] = vol;
				if (b & 8) s[len++] = command;
				if (b & 16) s[len++] = param;
			}

			// Pad each row with one empty cell for the extra channel added above.
			if(addChannel && (j % m_nChannels == 1 || m_nChannels == 1))
			{
				ASSERT_CAN_WRITE(1);
				s[len++] = 0x80;
			}
			ASSERT_CAN_WRITE(5);
		}

		if(emptyPattern && numRows == 64)
		{
			// Be smart when saving empty patterns!
			len = 0;
		}

		// Reaching the limits of file format?
		if(len > uint16_max)
		{
			AddToLog(mpt::format("%1 (%2 %3)")(str_tooMuchPatternData, str_pattern, pat));
			len = uint16_max;
		}

		patHead[7] = static_cast<uint8>(len & 0xFF);
		patHead[8] = static_cast<uint8>(len >> 8);

		mpt::IO::Write(f, patHead);
		if(len) mpt::IO::WriteRaw(f, s.data(), len);
	}

#undef ASSERT_CAN_WRITE

	// Check which samples are referenced by which instruments (for assigning unreferenced samples to instruments)
	std::vector<bool> sampleAssigned(GetNumSamples() + 1, false);
	for(INSTRUMENTINDEX ins = 1; ins <= GetNumInstruments(); ins++)
	{
		if(Instruments[ins] != nullptr)
		{
			Instruments[ins]->GetSamples(sampleAssigned);
		}
	}

	// Writing instruments
	for(INSTRUMENTINDEX ins = 1; ins <= writeInstruments; ins++)
	{
		XMInstrumentHeader insHeader;
		std::vector<SAMPLEINDEX> samples;

		if(GetNumInstruments())
		{
			if(Instruments[ins] != nullptr)
			{
				// Convert instrument
				insHeader.ConvertToXM(*Instruments[ins], compatibilityExport);

				samples = insHeader.instrument.GetSampleList(*Instruments[ins], compatibilityExport);
				if(samples.size() > 0 && samples[0] <= GetNumSamples())
				{
					// Copy over auto-vibrato settings of first sample
					insHeader.instrument.ApplyAutoVibratoToXM(Samples[samples[0]], GetType());
				}

				std::vector<SAMPLEINDEX> additionalSamples;

				// Try to save "instrument-less" samples as well by adding those after the "normal" samples of our sample.
				// We look for unassigned samples directly after the samples assigned to our current instrument, so if
				// e.g. sample 1 is assigned to instrument 1 and samples 2 to 10 aren't assigned to any instrument,
				// we will assign those to sample 1. Any samples before the first referenced sample are going to be lost,
				// but hey, I wrote this mostly for preserving instrument texts in existing modules, where we shouldn't encounter this situation...
				for(auto smp : samples)
				{
					while(++smp <= GetNumSamples()
						&& !sampleAssigned[smp]
						&& insHeader.numSamples < (compatibilityExport ? 16 : 32))
					{
						sampleAssigned[smp] = true;	// Don't want to add this sample again.
						additionalSamples.push_back(smp);
						insHeader.numSamples++;
					}
				}

				samples.insert(samples.end(), additionalSamples.begin(), additionalSamples.end());
			} else
			{
				MemsetZero(insHeader);
			}
		} else
		{
			// Convert samples to instruments
			MemsetZero(insHeader);
			insHeader.numSamples = 1;
			insHeader.instrument.ApplyAutoVibratoToXM(Samples[ins], GetType());
			samples.push_back(ins);
		}

		insHeader.Finalise();
		size_t insHeaderSize = insHeader.size;
		mpt::IO::WritePartial(f, insHeader, insHeaderSize);

		std::vector<SampleIO> sampleFlags(samples.size());

		// Write Sample Headers
		for(SAMPLEINDEX smp = 0; smp < samples.size(); smp++)
		{
			XMSample xmSample;
			if(samples[smp] <= GetNumSamples())
			{
				xmSample.ConvertToXM(Samples[samples[smp]], GetType(), compatibilityExport);
			} else
			{
				MemsetZero(xmSample);
			}
			sampleFlags[smp] = xmSample.GetSampleFormat();

			mpt::String::Write<mpt::String::spacePadded>(xmSample.name, m_szNames[samples[smp]]);

			mpt::IO::Write(f, xmSample);
		}

		// Write Sample Data
		for(SAMPLEINDEX smp = 0; smp < samples.size(); smp++)
		{
			if(samples[smp] <= GetNumSamples())
			{
				sampleFlags[smp].WriteSample(f, Samples[samples[smp]]);
			}
		}
	}

	if(!compatibilityExport)
	{
		// --- MPT-specific trailing chunks (skipped for FT2-compatible files) ---

		// Writing song comments
		if(!m_songMessage.empty())
		{
			uint32 size = mpt::saturate_cast<uint32>(m_songMessage.length());
			mpt::IO::WriteRaw(f, "text", 4);
			mpt::IO::WriteIntLE<uint32>(f, size);
			mpt::IO::WriteRaw(f, m_songMessage.c_str(), size);
		}
		// Writing midi cfg
		if(!m_MidiCfg.IsMacroDefaultSetupUsed())
		{
			mpt::IO::WriteRaw(f, "MIDI", 4);
			mpt::IO::WriteIntLE<uint32>(f, sizeof(MIDIMacroConfigData));
			mpt::IO::Write(f, static_cast<MIDIMacroConfigData &>(m_MidiCfg));
		}
		// Writing Pattern Names
		const PATTERNINDEX numNamedPats = Patterns.GetNumNamedPatterns();
		if(numNamedPats > 0)
		{
			mpt::IO::WriteRaw(f, "PNAM", 4);
			mpt::IO::WriteIntLE<uint32>(f, numNamedPats * MAX_PATTERNNAME);
			for(PATTERNINDEX pat = 0; pat < numNamedPats; pat++)
			{
				char name[MAX_PATTERNNAME];
				mpt::String::Write<mpt::String::maybeNullTerminated>(name, Patterns[pat].GetName());
				mpt::IO::Write(f, name);
			}
		}
		// Writing Channel Names
		{
			CHANNELINDEX numNamedChannels = 0;
			for(CHANNELINDEX chn = 0; chn < m_nChannels; chn++)
			{
				if (ChnSettings[chn].szName[0]) numNamedChannels = chn + 1;
			}
			// Do it!
			if(numNamedChannels)
			{
				mpt::IO::WriteRaw(f, "CNAM", 4);
				mpt::IO::WriteIntLE<uint32>(f, numNamedChannels * MAX_CHANNELNAME);
				for(CHANNELINDEX chn = 0; chn < numNamedChannels; chn++)
				{
					char name[MAX_CHANNELNAME];
					mpt::String::Write<mpt::String::maybeNullTerminated>(name, ChnSettings[chn].szName);
					mpt::IO::Write(f, name);
				}
			}
		}

		//Save hacked-on extra info
		SaveMixPlugins(&f);
		if(GetNumInstruments())
		{
			SaveExtendedInstrumentProperties(writeInstruments, f);
		}
		SaveExtendedSongProperties(f);
	}

	return true;
}
#endif // MODPLUG_NO_FILESAVE
OPENMPT_NAMESPACE_END
|
DuncanWalter/card-engine
|
src/cards/status/memoryLeak.js
|
<reponame>DuncanWalter/card-engine
import { defineCard, Card, PlayArgs } from '../card'
import { defineEffect } from '../../effects/effect'
import { ExhaustCard } from '../../events/exhaustCard'
import { AddToHand } from '../../events/addToHand'
import { DrawCards } from '../../events/drawCards'
import { BindEnergy } from '../../events/bindEnergy'
// Status effect attached by MemoryLeak: when a card carrying this effect is
// added to the hand as part of a draw (AddToHand events tagged with
// DrawCards), the owner loses 1 energy.
const Drain = defineEffect(
  'drain',
  // Display metadata for the effect badge.
  {
    name: 'Drain',
    innerColor: '#ee8866',
    outerColor: '#bb3322',
    description: 'On draw, lose 1 energy.',
    sides: 3,
    rotation: 0.5,
  },
  // Stacking rules: does not stack; level is clamped to exactly 1.
  {
    stacked: false,
    delta: (x) => x,
    min: 1,
    max: 1,
  },
  // Listener spec: respond to AddToHand events targeting the owner that were
  // caused by a card draw.
  (owner) => ({
    subjects: [owner],
    tags: [DrawCards],
    type: AddToHand,
  }),
  // Handler: drain 1 energy from the player.
  // NOTE(review): BindEnergy with quantity -1 is presumed to subtract energy —
  // confirm against the BindEnergy event implementation.
  (owner, type) =>
    function*({ resolver, game }) {
      yield resolver.processEvent(
        new BindEnergy(owner, game.player, {
          quantity: -1,
        })
      )
    },
  [AddToHand]
)
// "Memory Leak": an unplayable curse-style card. Its play generator simply
// echoes the energy it was given; the attached Drain effect (level 1)
// punishes drawing it by draining 1 energy.
export const MemoryLeak = defineCard(
  'memoryLeak',
  function*(self: Card<>, { energy }: PlayArgs) {
    return { energy }
  },
  // Default play-argument values.
  {
    energy: undefined,
  },
  // Card face: color, body text, display title.
  {
    color: '111122',
    text: '#[Unplayable]. On draw, lose 1 energy.',
    title: 'Memory Leak',
  },
  // Effects granted by this card: Drain at stack level 1.
  [Drain, 1]
)
|
AntonLantukh/2020-03-otus-java-lantukh
|
hw07-atmDepartment/src/main/java/ru/otus/lantukh/atm/Dispenser.java
|
package ru.otus.lantukh.atm;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
/**
 * ATM cash dispenser: holds one {@link CashCell} per supported nominal and
 * implements greedy withdrawal, deposit, and balance queries.
 */
public class Dispenser {

    private List<Integer> nominalValues;
    private HashMap<Integer, CashCell> vault;

    public Dispenser() {
        initialize();
    }

    /**
     * (Re)creates the vault with one cash cell per supported nominal, each
     * seeded with 1000 bills.
     */
    public void initialize() {
        // Renamed local (was shadowing the field of the same name).
        List<Integer> nominals = Nominal.getNominals();
        HashMap<Integer, CashCell> newVault = new HashMap<>();
        for (int nominal : nominals) {
            newVault.put(nominal, new CashCell(1000));
        }
        this.vault = newVault;
        this.nominalValues = nominals;
    }

    Map<Integer, CashCell> getVault() {
        return vault;
    }

    public CashCell getCashCell(int nominal) {
        return getVault().get(nominal);
    }

    public void updateCashCell(int nominal, int count) {
        CashCell cell = getCashCell(nominal);
        cell.setCount(count);
    }

    /**
     * Stores the given bills in the vault.
     * NOTE(review): this delegates to CashCell.setCount(...), which looks like
     * it overwrites the stored count rather than adding to it — confirm the
     * CashCell contract before relying on "deposit" semantics.
     */
    public void depositCash(Map<Integer, Integer> amount) {
        for (Map.Entry<Integer, Integer> entry : amount.entrySet()) {
            Integer nominal = entry.getKey();
            Integer count = entry.getValue();
            updateCashCell(nominal, count);
        }
    }

    /**
     * Greedily splits {@code amount} into bills, iterating the nominals in
     * the order produced by Nominal.getNominals() (the greedy strategy assumes
     * descending order, as the original loop already did).
     *
     * Fix: previously, when a cell held fewer bills than the greedy count
     * asked for, the whole nominal was skipped, which could make a satisfiable
     * request fail with DispenseException. Now the available bills are used
     * and the remainder falls through to smaller nominals.
     *
     * NOTE(review): the vault is not decremented here — confirm whether the
     * caller is expected to commit the dispensed bills.
     *
     * @param amount total to dispense
     * @return map of nominal to number of bills dispensed
     * @throws DispenseException if the amount cannot be represented with the
     *         bills on hand
     */
    public Map<Integer, Integer> withdrawCash(int amount) {
        HashMap<Integer, Integer> dispensedCash = new HashMap<>(nominalValues.size());
        for (int nominal : nominalValues) {
            int wanted = amount / nominal;
            if (wanted == 0) {
                continue;
            }
            // Use as many bills as the cell actually holds.
            int used = Math.min(wanted, getCashCell(nominal).getCount());
            if (used > 0) {
                dispensedCash.put(nominal, used);
                amount -= used * nominal;
            }
        }
        // Could not assemble the full amount from the available bills.
        if (amount != 0) {
            throw new DispenseException();
        }
        return dispensedCash;
    }

    /** Total value of all bills currently held (nominal * count per cell). */
    public int getBalance() {
        return getVault()
                .entrySet()
                .stream()
                .mapToInt(item -> item.getKey() * item.getValue().getCount())
                .sum();
    }
}
|
coderMaruf/leetcode-1
|
No_0215_Kth Largest Element in an Array/by_sort.py
|
<filename>No_0215_Kth Largest Element in an Array/by_sort.py
'''
Description:
Find the kth largest element in an unsorted array. Note that it is the kth largest element in the sorted order, not the kth distinct element.
Example 1:
Input: [3,2,1,5,6,4] and k = 2
Output: 5
Example 2:
Input: [3,2,3,1,2,4,5,5,6] and k = 4
Output: 4
Note:
You may assume k is always valid, 1 ≤ k ≤ array's length.
'''
from typing import List
class Solution:
    def findKthLargest(self, nums: List[int], k: int) -> int:
        """Return the k-th largest value in ``nums``.

        Sorts ``nums`` in place into descending order (same side effect as
        before, via a negating sort key) and picks the (k-1)-th entry.
        """
        nums.sort(key=lambda value: -value)
        kth_index = k - 1
        return nums[kth_index]
# n : the length of input list, nums
## Time Complexity: O( n log n )
#
# The overhead in time is the cost of timsort in python, which is of O( n log n).
## Space Compleixty: O( 1 )
#
# The overhead in time is the loop index, which is of O( 1 )
from collections import namedtuple

# Test-case record: `sequence` is the input list, `k` the rank to query.
# Typename fixed: it previously read 'TestEnry', which made repr() and
# pickling report a misspelled class name.
TestEntry = namedtuple('TestEntry', 'sequence k')
def test_bench():
    """Print findKthLargest results for a small fixed suite.

    Expected output, one value per line: 5, 4, 6, 1.
    """
    cases = [
        TestEntry(sequence=[3, 2, 1, 5, 6, 4], k=2),
        TestEntry(sequence=[3, 2, 3, 1, 2, 4, 5, 5, 6], k=4),
        TestEntry(sequence=[3, 2, 1, 5, 6, 4], k=1),
        TestEntry(sequence=[3, 2, 1, 5, 6, 4], k=6),
    ]
    for case in cases:
        print(Solution().findKthLargest(nums=case.sequence, k=case.k))
    return


if __name__ == '__main__':
    test_bench()
|
cragkhit/elasticsearch
|
references/bcb_chosen_clones/selected#312174#26#118.java
|
<reponame>cragkhit/elasticsearch<filename>references/bcb_chosen_clones/selected#312174#26#118.java
/**
 * Scrapes the village "construction" HTML page: parses every buildable item,
 * its resource costs, build time, food-shortage state and upgrade URL, and
 * populates {@code this.buildings}.
 *
 * NOTE(review): relies on fields and helpers declared outside this excerpt
 * (buildings, desiredBuildingType, translator, getUrlString(), ...).
 *
 * @param util HTTP helper used to fetch the page
 * @throws ConversationException if the page cannot be parsed as expected
 */
public void fetch(Util util) throws ConversationException {
    // Start offset of each <h2> section; used later to delimit per-item regions.
    List<Integer> positions = new ArrayList<Integer>();
    setUpgradeable(false);
    setFoodShortage(false);
    Pattern p;
    Matcher m;
    String page = util.httpGetPage(getUrlString());
    // Ignore everything after the "soon_link" marker (content beyond the
    // currently buildable list).
    int pageLimit = page.length();
    p = Pattern.compile("soon_link");
    m = p.matcher(page);
    if (m.find()) {
        pageLimit = m.start();
    }
    // Pass 1: one <h2> heading per buildable item; the heading text (minus an
    // optional "N. " prefix) is the building name.
    p = Pattern.compile("(?s)(?i)<h2>(?:\\d+\\.\\s*)?(.*?)</h2>");
    m = p.matcher(page);
    m.region(0, pageLimit);
    while (m.find()) {
        String buildingTypeString = m.group(1);
        String name = buildingTypeString;
        ConstructionData item = new ConstructionData(name, getUrlString(), getTranslator());
        item.setCurrentLevel(0);
        this.buildings.add(item);
        BuildingType buildingType = BuildingType.fromKey(getTranslator().getKeyword(buildingTypeString));
        item.setType(buildingType);
        // Pre-select this item when it matches the desired type, or when only
        // one choice exists.
        if ((desiredBuildingType != null && desiredBuildingType.equals(buildingType)) || isOneChoiceOnly()) {
            super.setConstructionData(item);
        }
        positions.add(new Integer(m.start()));
    }
    int lastPos = 0;
    int nextPos = pageLimit;
    // Pass 2: resource costs. Resource icons img/un/r/<n>.gif are matched in
    // document order, so lastPos advances monotonically across items.
    for (ConstructionData item : buildings) {
        for (ResourceType resourceType : ResourceType.values()) {
            int res = resourceType.toInt() + 1;
            p = Pattern.compile("(?s)<img .*? src=\".*?img/un/r/" + res + ".gif\"[^>]*>(\\d\\d*) \\|");
            m = p.matcher(page);
            m.region(lastPos, pageLimit);
            if (m.find()) {
                String stringNumber = m.group(1);
                lastPos = m.end();
                try {
                    item.setNeededResource(resourceType, Integer.parseInt(stringNumber));
                } catch (NumberFormatException e) {
                    throw new ConversationException("Invalid number for \"" + this.getName() + "\": " + stringNumber);
                }
            }
        }
    }
    lastPos = 0;
    // Pass 3: per-item region between consecutive <h2> offsets — food-shortage
    // flag, build time, and the upgrade submit URL.
    for (int i = 0; i < positions.size(); i++) {
        lastPos = positions.get(i).intValue();
        if (i == positions.size() - 1) {
            nextPos = pageLimit;
        } else {
            nextPos = positions.get(i + 1).intValue();
        }
        // Food-shortage marker: if present, the item cannot be upgraded now.
        p = Pattern.compile(String.format("(?s)(?i)<span class=\"c\">%s[^>]*</span>", translator.getMatchingPattern(Translator.FOOD_SHORTAGE)));
        m = p.matcher(page);
        m.region(lastPos, nextPos);
        if (!m.find()) {
            // Build duration: try the clock icon first, then a class="clock"
            // element as fallback.
            p = Pattern.compile("(?s)<img .*? src=\".*?img/un/a/clock.gif\"[^>]*>([^<]*)<");
            m = p.matcher(page);
            m.region(lastPos, nextPos);
            if (!m.find()) {
                p = Pattern.compile("(?s)<img[^>]*class=\"clock\"[^>]*>([^<]*)");
                m = p.matcher(page);
                m.region(lastPos, page.length());
            }
            // NOTE(review): if the first clock pattern DID match above, this
            // second find() continues past that match and searches for a
            // *second* occurrence in the region — verify whether a
            // reset()/region re-set is missing here.
            if (m.find()) {
                String timeString = m.group(1).trim();
                buildings.get(i).setSecondsForNextLevel(Util.timeToSeconds(timeString));
            } else {
                throw new ConversationException("Can't find time to complete " + this.getName());
            }
        } else {
            buildings.get(i).setUpgradeable(false);
            buildings.get(i).setFoodShortage(true);
        }
        // NOTE(review): this returns (not `continue`) on the first
        // non-upgradeable item, skipping all later items — confirm the early
        // exit is intended.
        if (!buildings.get(i).isUpgradeable()) {
            buildings.get(i).setSubmitUrlString(null);
            return;
        }
        // Link that triggers the upgrade (dorfN.php?...), resolved to a full URL.
        p = Pattern.compile("(?s)<a href=\"(dorf\\d\\.php\\?.*?)\">");
        m = p.matcher(page);
        m.region(lastPos, nextPos);
        if (m.find()) {
            String submitUrlString = m.group(1);
            buildings.get(i).setSubmitUrlString(Util.getFullUrl(this.getUrlString(), submitUrlString));
        } else {
            buildings.get(i).setSubmitUrlString(null);
        }
    }
}
|
lechium/tvOS124Headers
|
Applications/PineBoard/PBAppDepot.h
|
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import "NSObject.h"
#import "BBObserverDelegate.h"
#import "FBSApplicationDataStoreObserver.h"
#import "FBUIApplicationServiceDelegate.h"
#import "MCProfileConnectionObserver.h"
#import "PBSAppDepotProxyProtocol.h"
#import "UISApplicationStateServiceDelegate.h"
@class BBObserver, FBSApplicationDataStoreMonitor, NSArray, NSDictionary, NSMutableDictionary, NSString, PBKioskAppConfiguration, UISApplicationStateService;
// PineBoard's central registry of installed applications and their state
// (badges, provisioning, whitelists/blacklists, kiosk configuration).
// Reverse-engineered interface recovered by class-dump from the tvOS 12.4
// binary; ivar offsets and IMP addresses are informational only.
@interface PBAppDepot : NSObject <FBUIApplicationServiceDelegate, UISApplicationStateServiceDelegate, BBObserverDelegate, MCProfileConnectionObserver, FBSApplicationDataStoreObserver, PBSAppDepotProxyProtocol>
{
    // Error parsing type: AB, name: _needsReload
    // Error parsing type: AB, name: _needsNotifyAppStateDidChange
    UISApplicationStateService *_uiAppStateService; // 16 = 0x10
    _Bool _enforceProvisioningOnSystemAppsEnabled; // 24 = 0x18
    NSMutableDictionary *_internalAppState; // 32 = 0x20
    NSArray *_internalProvisionedAppIdentifiers; // 40 = 0x28
    NSArray *_appWhitelistOptions; // 48 = 0x30
    PBKioskAppConfiguration *_internalKioskAppConfiguration; // 56 = 0x38
    BBObserver *_bbObserver; // 64 = 0x40
    FBSApplicationDataStoreMonitor *_monitor; // 72 = 0x48
}
// Class-level helpers for restriction/provisioning checks and the singleton.
+ (_Bool)_isAppRestrictionProfileInstalled; // IMP=0x000000010006e33c
+ (_Bool)_checkApplicationIsRestricted:(id)arg1; // IMP=0x000000010006dfc4
+ (_Bool)_checkApplicationIsSystemAppRequiringProvisioning:(id)arg1; // IMP=0x000000010006decc
+ (id)_simulatorDisabledBundleIdentifiers; // IMP=0x0000000100069fd4
+ (id)_whitelistBlacklistExemptBundleIdentifiers; // IMP=0x0000000100069f3c
+ (id)_provisioningExemptBundleIdentifiers; // IMP=0x0000000100069e08
+ (id)sharedInstance; // IMP=0x0000000100069db4
+ (void)setupAppDepot; // IMP=0x0000000100069bdc
@property(retain, nonatomic) FBSApplicationDataStoreMonitor *monitor; // @synthesize monitor=_monitor;
@property(retain, nonatomic) BBObserver *bbObserver; // @synthesize bbObserver=_bbObserver;
@property(nonatomic) _Bool enforceProvisioningOnSystemAppsEnabled; // @synthesize enforceProvisioningOnSystemAppsEnabled=_enforceProvisioningOnSystemAppsEnabled;
@property(copy, nonatomic) PBKioskAppConfiguration *internalKioskAppConfiguration; // @synthesize internalKioskAppConfiguration=_internalKioskAppConfiguration;
@property(readonly, copy, nonatomic) NSArray *appWhitelistOptions; // @synthesize appWhitelistOptions=_appWhitelistOptions;
@property(copy, nonatomic) NSArray *internalProvisionedAppIdentifiers; // @synthesize internalProvisionedAppIdentifiers=_internalProvisionedAppIdentifiers;
@property(retain, nonatomic) NSMutableDictionary *internalAppState; // @synthesize internalAppState=_internalAppState;
// Instance methods: observer callbacks, app-state bookkeeping, badge/icon
// updates, and the PBSAppDepotProxyProtocol surface.
- (void).cxx_destruct; // IMP=0x000000010006e584
- (void)_refreshCachedAppWhitelistOptions; // IMP=0x000000010006de00
- (id)_reversedDictionaryWithDictionary:(id)arg1; // IMP=0x000000010006dc60
- (id)_systemAppBundleIdentifiers; // IMP=0x000000010006da7c
- (void)observer:(id)arg1 noteServerConnectionStateChanged:(_Bool)arg2; // IMP=0x000000010006d948
- (void)observer:(id)arg1 updateSectionInfo:(id)arg2; // IMP=0x000000010006d888
- (void)_updateBadgeEnabledWithInfos:(id)arg1; // IMP=0x000000010006d5d0
- (void)_notifyAppStateDidChange; // IMP=0x000000010006d4c4
- (void)_setNeedsNotifyAppStateDidChange; // IMP=0x000000010006d404
- (void)_updateAppInfo; // IMP=0x000000010006c3a0
- (void)setNeedsReload; // IMP=0x000000010006c2e0
- (void)profileConnectionDidReceiveRestrictionChangedNotification:(id)arg1 userInfo:(id)arg2; // IMP=0x000000010006c2a8
- (void)profileConnectionDidReceiveAppWhitelistChangedNotification:(id)arg1 userInfo:(id)arg2; // IMP=0x000000010006c270
- (void)_loadInitialAppState; // IMP=0x000000010006befc
- (void)_removeAppStateForIdentifier:(id)arg1; // IMP=0x000000010006be18
- (id)_addAppStateForIdentifier:(id)arg1; // IMP=0x000000010006bd00
- (id)_appStateForIdentifier:(id)arg1; // IMP=0x000000010006bc7c
- (void)dataStoreMonitor:(id)arg1 didInvalidateApplication:(id)arg2; // IMP=0x000000010006bc60
- (void)dataStoreMonitor:(id)arg1 didUpdateApplication:(id)arg2 forKey:(id)arg3; // IMP=0x000000010006bc18
- (void)unsetCacheDeletingForBundleIdentifier:(id)arg1; // IMP=0x000000010006bb10
- (void)setCacheDeletingForBundleIdentifier:(id)arg1; // IMP=0x000000010006ba08
- (void)setIconName:(id)arg1 forBundleIdentifier:(id)arg2; // IMP=0x000000010006b8d0
- (void)setRecentlyUpdated:(_Bool)arg1 forBundleIdentifier:(id)arg2; // IMP=0x000000010006b7bc
- (id)dataSourceForApplicationBundleIdentifier:(id)arg1; // IMP=0x000000010006b6f8
- (void)applicationService:(id)arg1 getBadgeValueForBundleIdentifier:(id)arg2 withCompletion:(CDUnknownBlockType)arg3; // IMP=0x000000010006b480
- (void)applicationService:(id)arg1 setBadgeValue:(id)arg2 forBundleIdentifier:(id)arg3; // IMP=0x000000010006b314
- (void)removeAppStateForApplicationProxies:(id)arg1; // IMP=0x000000010006b0dc
- (void)addAppStateForApplicationProxies:(id)arg1; // IMP=0x000000010006ad04
@property(readonly, copy) PBKioskAppConfiguration *kioskAppConfiguration;
@property(readonly, copy) NSArray *provisionedAppIdentifiers;
- (id)appStateForAppIdentifier:(id)arg1; // IMP=0x000000010006a964
@property(readonly, copy) NSDictionary *appState;
- (id)_appDepotQueue; // IMP=0x000000010006a718
- (void)dealloc; // IMP=0x000000010006a610
- (id)init; // IMP=0x000000010006a074
// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;
@end
|
Decatur-High-GlobalDynamics/FRC-2020-Team-4026
|
src/main/java/frc/robot/TeamSparkMAX.java
|
<reponame>Decatur-High-GlobalDynamics/FRC-2020-Team-4026
package frc.robot;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import com.revrobotics.CANSparkMax;
import com.revrobotics.CANPIDController;
import com.revrobotics.CANEncoder;
import com.revrobotics.CANError;
import com.revrobotics.ControlType;
/**
 * CANSparkMax wrapper that adds SmartDashboard telemetry, PID-profile
 * bookkeeping and SmartMotion convenience helpers for robot subsystems.
 */
public class TeamSparkMAX extends CANSparkMax {

    // Minimum seconds between telemetry pushes; 0 publishes on every periodic() call.
    public static double telemetryUpdateInterval_secs = 0.0;
    private double lastTelemetryUpdate = 0;
    // Prefix for all SmartDashboard keys published by this motor.
    protected final String smartDashboardPrefix;
    protected int numEStops = 0;
    // Double.MAX_VALUE acts as an "unset" sentinel; overwritten on first periodic().
    protected double maxSpeed = Double.MAX_VALUE;
    protected double smartMotionLoopTarget;
    public CANPIDController canPidController;
    public CANEncoder canEncoder;
    protected PidParameters pidProfiles[] = new PidParameters[4];
    // Last control mode commanded through this wrapper; null until first command.
    private ControlType ctrlType = null;

    public TeamSparkMAX(String smartDashboardPrefix, int deviceID) {
        super(deviceID, MotorType.kBrushless); // Neos are brushless
        this.smartDashboardPrefix = smartDashboardPrefix;
        canPidController = getPIDController();
        canEncoder = getEncoder();
    }

    // Returns false only for modes known not to be PID-driven.
    // NOTE(review): a null mode (nothing commanded yet) reports true — confirm
    // that default is intended.
    private static boolean isPidControlMode(ControlType mode) {
        // kDutyCycle, kVelocity, kVoltage, kPosition, kSmartMotion, kCurrent, kSmartVelocity
        // Are all possible values. If one of these are not part of PID, add case for them and return
        // false.
        if (mode == null) {
            return true;
        }
        switch (mode) {
            case kCurrent:
                return false;
            default:
                return true;
        }
    }

    /** Increments the emergency-stop counter shown on the dashboard. */
    public void noteEmergencyStop() {
        numEStops++;
    }

    public double getCurrentEncoderValue() {
        // This should be configurable
        return canEncoder.getPosition();
    }

    public void resetEncoder() {
        canEncoder.setPosition(0.0);
    }

    public boolean
    isRunningPidControlMode() { // Dunno if this is safe, but its the easiest way to get around
        // problems with the PidParameters.
        return isPidControlMode(ctrlType);
    }

    /**
     * Publishes encoder/speed telemetry to SmartDashboard, rate-limited by
     * telemetryUpdateInterval_secs.
     */
    public void periodic() {
        double now = TeamUtils.getCurrentTime();
        if ((now - lastTelemetryUpdate) < telemetryUpdateInterval_secs) {
            return;
        }
        lastTelemetryUpdate = now;
        double currentEncoderValue = getCurrentEncoderValue();
        double currentSpeed = canEncoder.getVelocity();
        // First call replaces the MAX_VALUE sentinel; afterwards track the peak.
        if (maxSpeed == Double.MAX_VALUE || currentSpeed > maxSpeed) maxSpeed = currentSpeed;
        // SmartDashboard.putNumber(smartDashboardPrefix + ".PowerPercent", getMotorOutputPercent());
        SmartDashboard.putNumber(smartDashboardPrefix + ".Position-ticks", currentEncoderValue);
        SmartDashboard.putNumber(smartDashboardPrefix + ".speedPer100ms", currentSpeed);
        SmartDashboard.putNumber(smartDashboardPrefix + ".speedPerSec", currentSpeed * 10);
        SmartDashboard.putNumber(smartDashboardPrefix + ".maxSpeedPer100ms", maxSpeed);
        SmartDashboard.putNumber(smartDashboardPrefix + ".maxSpeedPerSec", maxSpeed * 10);
        // SmartDashboard.putString(smartDashboardPrefix + "Mode", getControlMode().toString());
        // NOTE(review): this key lacks the "." separator used by the keys above —
        // confirm whether "EmergencyStops" vs ".EmergencyStops" is intended.
        SmartDashboard.putNumber(smartDashboardPrefix + "EmergencyStops", numEStops);
        /*
        switch (getControlMode()) {
          case Position:
          case Velocity:
            SmartDashboard.putNumber(
                smartDashboardPrefix + "Target",
                getClosedLoopTarget(0)); // 0 is the primary closed-loop
            SmartDashboard.putNumber(smartDashboardPrefix + "Error", getClosedLoopError(0));
            break;
          default:
            // Fill in Zeros when we're not in a mode that is using it
            SmartDashboard.putNumber(smartDashboardPrefix + "Target", 0);
            SmartDashboard.putNumber(smartDashboardPrefix + "Error", 0);
        }*/
    }

    public double getClosedLoopTarget() {
        return this.smartMotionLoopTarget;
    }

    public double setClosedLoopTarget(double value) {
        this.smartMotionLoopTarget = value;
        return this.smartMotionLoopTarget;
    }

    /**
     * Commands a SmartVelocity reference.
     * NOTE(review): the magnitude (Math.abs) is sent as the reference while the
     * signed value is stored as the target — confirm sign handling is intended
     * (getVelocityError() compares the signed target against the encoder).
     */
    public CANError setSmartMotionVelocity(double speed) {
        setClosedLoopTarget(speed);
        ctrlType = ControlType.kSmartVelocity;
        return this.canPidController.setReference(Math.abs(speed), ControlType.kSmartVelocity);
    }

    /** Difference between the stored closed-loop target and measured velocity. */
    public double getVelocityError() {
        /* if (getControlMode() != ControlType.kSmartVelocity) {
          return 0;
        } */
        double currentSpeed = canEncoder.getVelocity();
        return getClosedLoopTarget() - currentSpeed;
    }

    /** Loads PID gains and SmartMotion limits into the given profile slot. */
    public void configureWithPidParameters(PidParameters pidParameters, int pidSlotIndex) {
        pidProfiles[pidSlotIndex] = pidParameters;
        canPidController.setFF(pidParameters.kF, pidSlotIndex); // Feed-forward
        canPidController.setP(pidParameters.kP, pidSlotIndex);
        canPidController.setI(pidParameters.kI, pidSlotIndex);
        canPidController.setD(pidParameters.kD, pidSlotIndex);
        canPidController.setOutputRange(-pidParameters.kPeakOutput, pidParameters.kPeakOutput);
        canPidController.setSmartMotionMaxVelocity(pidParameters.maxVel, pidSlotIndex);
        canPidController.setSmartMotionMinOutputVelocity(0, pidSlotIndex);
        canPidController.setSmartMotionMaxAccel(pidParameters.maxAcc, pidSlotIndex);
        canPidController.setSmartMotionAllowedClosedLoopError(
            pidParameters.errorTolerance, pidSlotIndex);
    }
}
|
zhicaizack/main
|
src/test/java/seedu/address/logic/commands/ClassAddCommandTest.java
|
<filename>src/test/java/seedu/address/logic/commands/ClassAddCommandTest.java<gh_stars>1-10
package seedu.address.logic.commands;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static seedu.address.logic.commands.ClassAddCommand.MESSAGE_SUCCESS;
import static seedu.address.logic.commands.CommandTestUtil.VALID_CLASS_T16;
import static seedu.address.logic.commands.CommandTestUtil.VALID_MAX_ENROLLMENT_20;
import static seedu.address.logic.commands.CommandTestUtil.VALID_MODULE_CODE_CG1111;
import static seedu.address.logic.commands.CommandTestUtil.assertCommandSuccess;
import static seedu.address.testutil.TypicalPersons.getTypicalAddressBook;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import seedu.address.logic.CommandHistory;
import seedu.address.logic.commands.exceptions.CommandException;
import seedu.address.model.Model;
import seedu.address.model.ModelManager;
import seedu.address.model.StorageController;
import seedu.address.model.UserPrefs;
import seedu.address.model.classroom.ClassName;
import seedu.address.model.classroom.Classroom;
import seedu.address.model.classroom.ClassroomManager;
import seedu.address.model.classroom.Enrollment;
import seedu.address.model.module.Module;
import seedu.address.model.module.ModuleCode;
import seedu.address.model.module.ModuleManager;
import seedu.address.model.module.exceptions.DuplicateModuleException;
import seedu.address.testutil.ClassroomBuilder;
import seedu.address.testutil.ModuleBuilder;
/**
* Provides a test for the class add command
*/
public class ClassAddCommandTest {

    private static ClassroomManager classroomManager;

    @Rule
    public ExpectedException thrown = ExpectedException.none();
    private Model model = new ModelManager(getTypicalAddressBook(), new UserPrefs());
    private CommandHistory commandHistory = new CommandHistory();

    // Seeds module CG1111 so classrooms referencing it are valid in the tests below.
    @Before
    public void setUp() {
        StorageController.enterTestMode();
        ModuleManager moduleManager = ModuleManager.getInstance();
        classroomManager = ClassroomManager.getInstance();
        Module module = new ModuleBuilder().withModuleCode("CG1111").build();
        try {
            moduleManager.addModule(module);
        } catch (DuplicateModuleException e) {
            e.printStackTrace();
        }
    }

    // Adding a fresh classroom succeeds and reports the success message.
    @Test
    public void execute_classroomAccepted_addSuccessful() {
        Classroom classroom = new ClassroomBuilder().build();
        assertCommandSuccess(new ClassAddCommand(classroom), model, commandHistory,
                String.format(MESSAGE_SUCCESS, classroom.getClassName(),
                        classroom.getModuleCode(), classroom.getMaxEnrollment()),
                model);
    }

    // Re-adding the classroom created by the previous test is rejected.
    // NOTE(review): this test appears to depend on state left behind by
    // execute_classroomAccepted_addSuccessful (shared singleton manager) —
    // confirm the intended test-order independence.
    @Test
    public void execute_duplicateClassroom_throwsCommandException() throws Exception {
        Classroom validClassroom = new ClassroomBuilder().build();
        ClassAddCommand classAddCommand = new ClassAddCommand(validClassroom);
        thrown.expect(CommandException.class);
        thrown.expectMessage(ClassAddCommand.MESSAGE_DUPLICATE_CLASSROOM);
        classAddCommand.execute(model, commandHistory);
    }

    // A classroom referencing a module code that was never registered fails.
    @Test
    public void execute_classroomInvalidModule_throwsCommandException() throws Exception {
        Classroom validClassroom = new ClassroomBuilder().withModuleCode("CG1112").build();
        ClassAddCommand classAddCommand = new ClassAddCommand(validClassroom);
        thrown.expect(CommandException.class);
        thrown.expectMessage(ClassAddCommand.MESSAGE_MODULE_CODE_INVALID);
        classAddCommand.execute(model, commandHistory);
    }

    @Test
    public void constructor_nullClassroom_throwsNullPointerException() {
        thrown.expect(NullPointerException.class);
        new ClassAddCommand(null);
    }

    // Equality: same field values, same instance, null, and different type.
    @Test
    public void equals() {
        final String className = "T16";
        final String moduleCode = "CG1111";
        final String maxEnrollment = "20";
        final ClassAddCommand standardCommand = new ClassAddCommand(new Classroom(
                new ClassName(className),
                new ModuleCode(moduleCode),
                new Enrollment(maxEnrollment)));
        // same values -> returns true
        ClassAddCommand commandWithSameValues = new ClassAddCommand(
                new Classroom(new ClassName(VALID_CLASS_T16),
                        new ModuleCode(VALID_MODULE_CODE_CG1111),
                        new Enrollment(VALID_MAX_ENROLLMENT_20)));
        assertTrue(standardCommand.equals(commandWithSameValues));
        // same object -> returns true
        assertTrue(standardCommand.equals(standardCommand));
        // null -> returns false
        assertFalse(standardCommand.equals(null));
        // different types -> returns false
        assertFalse(standardCommand.equals(new ClearCommand()));
    }

    // Clears the singleton classroom store so later test classes start clean.
    @AfterClass
    public static void tearDown() {
        classroomManager.clearClassrooms();
        classroomManager.saveClassroomList();
    }
}
|
tanguyf/fundamental-react
|
src/Navbar/Navbar.Component.js
|
import React from 'react';
import {} from '../';
import { DocsTile, DocsText, Separator, Header, Description, Import, Properties } from '..';
import {
Button,
Popover,
Menu,
MenuList,
MenuItem,
Identifier,
Navbar,
NavbarGroup,
NavbarActions,
NavbarElement
} from '../';
export const NavbarComponent = () => {
const navbarCode = `<Navbar>
<NavbarGroup alignment="left">
<NavbarElement type="side-menu">
<Button option="light" glyph="menu2" navbar />
</NavbarElement>
<NavbarElement type="logo" noMargin="left" />
<NavbarElement type="product-name">Product Name</NavbarElement>
</NavbarGroup>
<NavbarGroup launchpad>
<Popover
control={
<Button option="light">
Suite Name
</Button>
}
noArrow
body={
<Menu>
<MenuList>
<MenuItem url="/">Option 1</MenuItem>
<MenuItem url="/">Option 2</MenuItem>
<MenuItem url="/">Option 3</MenuItem>
<MenuItem url="/">Option 4</MenuItem>
</MenuList>
</Menu>
}
/>
</NavbarGroup>
<NavbarGroup alignment="right">
<NavbarElement type="context-menu">
<Popover
control={<Button option="light">Context Switcher</Button>}
body={
<Menu>
<MenuList>
<MenuItem url="/">Option 1</MenuItem>
<MenuItem url="/">Option 2</MenuItem>
<MenuItem url="/">Option 3</MenuItem>
<MenuItem url="/">Option 4</MenuItem>
</MenuList>
</Menu>
}
/>
</NavbarElement>
<NavbarActions>
<Button option="light" glyph="search" navbar />
<Button option="light" glyph="action-settings" navbar />
<Button option="light" navbar>
<Identifier size="s" modifier="circle">
WW
</Identifier>
</Button>
</NavbarActions>
</NavbarGroup>
</Navbar>`;
return (
<div>
<Header>Navigation Bar</Header>
<Description>
The navigation bar (Navbar) component offers a similar navigation approach between Hybris applications.
It allows for branding, application groupings, access to a left navigation, switch between applications,
contexts and access tooling such as search or a users’ profile.
</Description>
<Import module="Navbar" path="/fundamental-react/src/" />
<Separator />
<Properties
type="Inputs"
properties={[
{
name: 'alignment',
description:
"string - The position of the group in the navigation bar. Oprions include 'left' and 'right'. For default (middle) leave empty."
},
{
name: 'launchpad',
description: 'bool - When set to true, renders a navigation group as a launchpad.'
},
{
name: 'noMargin',
description:
"string - Renders the NavbarElement with no margin on either left or right side. Oprions include 'left' and 'right'. For default leave empty."
},
{
name: 'type',
description:
"string (required)- The type of the NavbarElement. Oprions include 'search', 'context-menu', 'product-name', 'logo', and 'side-menu'. "
}
]}
/>
<Separator />
<DocsTile>
<Navbar>
<NavbarGroup alignment="left">
<NavbarElement type="side-menu">
<Button option="light" glyph="menu2" navbar />
</NavbarElement>
<NavbarElement type="logo" noMargin="left" />
<NavbarElement type="product-name">Product Name</NavbarElement>
</NavbarGroup>
<NavbarGroup launchpad>
<Popover
control={<Button option="light">Suite Name</Button>}
noArrow
body={
<Menu>
<MenuList>
<MenuItem url="/">Option 1</MenuItem>
<MenuItem url="/">Option 2</MenuItem>
<MenuItem url="/">Option 3</MenuItem>
<MenuItem url="/">Option 4</MenuItem>
</MenuList>
</Menu>
}
/>
</NavbarGroup>
<NavbarGroup alignment="right">
<NavbarElement type="context-menu">
<Popover
control={<Button option="light">Context Switcher</Button>}
body={
<Menu>
<MenuList>
<MenuItem url="/">Option 1</MenuItem>
<MenuItem url="/">Option 2</MenuItem>
<MenuItem url="/">Option 3</MenuItem>
<MenuItem url="/">Option 4</MenuItem>
</MenuList>
</Menu>
}
/>
</NavbarElement>
<NavbarActions>
<Button option="light" glyph="search" navbar />
<Button option="light" glyph="action-settings" navbar />
<Button option="light" navbar>
<Identifier size="s" modifier="circle">
WW
</Identifier>
</Button>
</NavbarActions>
</NavbarGroup>
</Navbar>
</DocsTile>
<DocsText>{navbarCode}</DocsText>
<Separator />
</div>
);
};
|
yougov-poland/alpakka
|
kinesis/src/main/scala/akka/stream/alpakka/kinesis/javadsl/KinesisSchedulerSource.scala
|
/*
* Copyright (C) since 2016 Lightbend Inc. <https://www.lightbend.com>
*/
package akka.stream.alpakka.kinesis.javadsl
import java.util.concurrent.CompletionStage
import akka.NotUsed
import akka.stream.alpakka.kinesis.{CommittableRecord, scaladsl, _}
import akka.stream.javadsl.{Flow, Sink, Source, SubSource}
import software.amazon.kinesis.coordinator.Scheduler
import software.amazon.kinesis.processor.ShardRecordProcessorFactory
import software.amazon.kinesis.retrieval.KinesisClientRecord
import scala.compat.java8.FutureConverters._
import scala.concurrent.Future
object KinesisSchedulerSource {

  /** Java-friendly factory for a KCL [[Scheduler]] given a record-processor factory. */
  abstract class SchedulerBuilder {
    def build(r: ShardRecordProcessorFactory): Scheduler
  }

  /**
   * Java API: source of committable Kinesis records driven by a KCL scheduler.
   * Materializes the scheduler as a [[CompletionStage]].
   */
  def create(
      schedulerBuilder: SchedulerBuilder,
      settings: KinesisSchedulerSourceSettings
  ): Source[CommittableRecord, CompletionStage[Scheduler]] =
    scaladsl.KinesisSchedulerSource
      .apply(schedulerBuilder.build, settings)
      .mapMaterializedValue(_.toJava)
      .asJava

  /**
   * Java API: per-shard sub-sources of committable records.
   * NOTE(review): materializes a Scala [[Future]] rather than a
   * [[CompletionStage]] like [[create]] does — inconsistent for a javadsl
   * facade, but changing it would break the public signature; confirm intent.
   */
  def createSharded(
      schedulerBuilder: SchedulerBuilder,
      settings: KinesisSchedulerSourceSettings
  ): SubSource[CommittableRecord, Future[Scheduler]] =
    new SubSource(
      scaladsl.KinesisSchedulerSource
        .sharded(schedulerBuilder.build, settings)
    )

  /** Java API: flow that checkpoints each record and emits it downstream. */
  def checkpointRecordsFlow(
      settings: KinesisSchedulerCheckpointSettings
  ): Flow[CommittableRecord, KinesisClientRecord, NotUsed] =
    scaladsl.KinesisSchedulerSource
      .checkpointRecordsFlow(settings)
      .asJava

  /** Java API: sink that checkpoints records. */
  def checkpointRecordsSink(
      settings: KinesisSchedulerCheckpointSettings
  ): Sink[CommittableRecord, NotUsed] =
    scaladsl.KinesisSchedulerSource
      .checkpointRecordsSink(settings)
      .asJava
}
|
XakepSDK/truevfs
|
truevfs-access/src/test/java/net/java/truevfs/access/sample/PathCat.java
|
<reponame>XakepSDK/truevfs
/*
* Copyright © 2005 - 2021 Schlichtherle IT Services.
* All rights reserved. Use is subject to license terms.
*/
package net.java.truevfs.access.sample;
import java.io.IOException;
import net.java.truevfs.access.TApplication;
import net.java.truevfs.access.TConfig;
import net.java.truevfs.access.TFile;
/**
* A poor man's imitate of the cat(1) command line utility
* for concatenating the contents of each parameter path name on the standard
* output.
*
* @deprecated Use the Maven archetype for the module TrueVFS Access instead.
* Its group ID is {@code net.java.truevfs}.
* Its artifact ID is {@code truevfs-archetype-access}.
* @see <a href="http://www.gnu.org/software/wget/">GNU Cat - Home Page</a>
* @author <NAME>
* @version $Id$
*/
@Deprecated
public final class PathCat extends TApplication<IOException> {
/** Equivalent to {@code System.exit(new PathCat().run(args));}. */
public static void main(String[] args) throws IOException {
System.exit(new PathCat().run(args));
}
@Override
protected void setup() { }
@Override
protected int work(String[] args) throws IOException {
for (String path : args)
pathCat(path);
return 0;
}
// START SNIPPET: cat
/**
* Copies the contents of the parameter resource to the standard output.
* <p>
* The set of archive file extensions detected by this method is determined
* by the current archive detector
* {@code TConfig.current().getArchiveDetector()}
* and the respective file system driver providers on the class path.
*
* @param resource the path name string of the resource to copy.
* @throws IOException if accessing the resource results in an I/O error.
*/
static void pathCat(String resource) throws IOException {
new TFile(resource).output(System.out);
}
// END SNIPPET: cat
}
|
lilywang711/nei
|
public/src/module/param_editor/param_select/param_select.js
|
/*
 * Data-type dropdown (select) component
 */
NEJ.define([
'pro/common/regular/regular_base',
'base/event',
'base/util',
'base/element',
'pro/common/util',
'pro/common/jst_extend',
'../param_editor_config.js',
'pro/modal/modal_version_select',
'text!./param_select.html',
'text!./param_select.css'
], function (rb, v, u, e, util, jstex, editorConfig, VSModal, html, css) {
// Inject the component stylesheet once per page.
e._$addStyle(css);
// Key codes used for keyboard navigation in the dropdown list.
var ARROW_UP_KEY = 38;
var ARROW_DOWN_KEY = 40;
var ENTER_KEY = 13;
// id of the built-in String data type.
var STRING_TYPE_ID = 10001;
// Default options merged into this.data at config time.
var defaultOptions = {
    // data source for the auto-complete suggestions
    source: [],
    // item selected at initialization
    selected: null,
    // whether the list is open; without this key present initially, the
    // this.$watch('isOpen', ...) in config would not fire
    isOpen: false,
    // preview mode flag
    preview: false,
    // whether the value is editable
    editable: true,
    // maximum input length
    maxLen: null,
    emptyTip: '没有匹配的数据模型',
    // whether this is a plain parameter (e.g. HTTP request/response headers),
    // which never has custom data types
    isNormalParam: false,
    // whether this is the "required" field of a request parameter
    isRequiredField: false,
    // name of the field; used when setting source
    key: false,
    // whether this is the key of a hash map (collection)
    isHashMapKey: false,
    // the parameter this field belongs to; used when setting source
    param: {},
    // whether this is an anonymous type inside an imported data model
    readonlyNestEditor: false,
    // force read-only
    forceReadonly: false,
    placeholder: '未设置'
};
// Supported events:
// 1. change: fired whenever the selection changes
// select.$on('change', function(evt) {
//     evt.sender
//     evt.selected
// })
var ParamSelect = rb.extend({
name: 'param-select',
template: html,
// Component initialization: merges defaults, derives view state, and wires
// the isOpen watcher that commits/rolls back the selection and emits 'change'.
config: function () {
    this.data = u._$merge({}, defaultOptions, this.data);
    Object.assign(this.data, {
        showDetail: false,
        headers: editorConfig.options.headers,
        formats: editorConfig.options.formats,
        iHeaderNames: util.headname.map(function (name) {
            return {id: name, name: name};
        }),
        iHeaderValues: util.headvalue.map(function (name) {
            return {id: name, name: name};
        }),
        requiredFieldSource: [
            {name: '否', id: 0},
            {name: '是', id: 1}
        ],
        isRequiredField: this.data.key === 'required'
    });
    this._source = this.data.source;
    this.data.selected = {};
    this.setDefaultSelected();
    this._resetXlist();
    this.data.versionsMap = util._$getVersionsMap(this._source);
    // Force read-only mode.
    if (this.data.forceReadonly) {
        this.data.editable = false;
    }
    // Opening: snapshot the current selection and prepare the list UI.
    // Closing: resolve the final selection and emit 'change' if needed.
    this.$watch('isOpen', function (isOpen) {
        if (isOpen) {
            this._handleData();
            if (this.data.selected) {
                this.data.__oSelected = this.data.selected;
            }
            this._resetXlist();
            this._setListScrollTop();
            this._addEventOnce();
            this.$refs.input.select();
            return;
        } else {
            if (!this.data.selected) {
                if (this.data.isNormalParam) {
                    // Plain parameters accept arbitrary input.
                    this.data.selected = {
                        name: this.data.inputValue,
                        id: this.data.inputValue
                    };
                } else {
                    // No matching item: fall back to the previous selection.
                    this.data.selected = this.data.__oSelected || {};
                    this.data.inputValue = this.data.selected.name;
                }
            } else if (this.data.isNormalParam) {
                this.data.selected = {
                    name: this.data.inputValue,
                    id: this.data.inputValue
                };
            }
        }
        var emitChange = function () {
            this.data.sorted = false;
            this.$emit('change', {
                sender: this,
                selected: this.data.selected || {},
                oSelected: this.data.__oSelected || {},
                key: this.data.key,
                param: this.data.param,
                isNormalParam: this.data.isNormalParam,
                isRequiredField: this.data.isRequiredField
            });
        }.bind(this);
        if (this.data.__oSelected) {
            emitChange();
        }
        // Clear the highlighted row when the list closes.
        this.data.sIndex = null;
    }.bind(this));
},
modify: function (event, selected) {
event.preventDefault();
event.stopPropagation();
this.$emit('modify', {
ref: this,
selected: selected,
callback: function (evt) {
this._source = this.data.source;
this.setDefaultSelected();
this._resetXlist();
this.data.versionsMap = util._$getVersionsMap(this._source);
this.data.sorted = false;
this.$update();
}.bind(this)
});
},
setDefaultSelected: function () {
if (this.data.isRequiredField) {
var fieldIndex = this.data.param.required === 0 ? 0 : 1;
// 是和否下拉框都是可以修改的
this.data.editable = !this.data.readonlyNestEditor;
this.data.selected = this.data.requiredFieldSource[fieldIndex];
this.data.inputValue = this.data.selected.name;
if (Array.isArray(this._source)) {
this.data.__datatype = this._source.find(function (dt) {
return dt.id === this.data.selectedId;
}, this);
}
return;
}
if (this.data.isNormalParam) {
// 普通参数没有默认选项,比如HTTP 接口的请求头名称
Object.assign(this.data.selected, this.data.param);
this.data.inputValue = this.data.param[this.data.key];
if (this.data.key !== 'name') {
this.data.editable = true;
}
return;
}
// selectedId 是传入的需要选中的类型,默认是 String
this.data.selected.id = this.data.selectedId || STRING_TYPE_ID;
var selectedDatatype = this._source.find(function (dt) {
return dt.id === this.data.selected.id;
}, this);
Object.assign(this.data.selected, this._getFields(selectedDatatype));
},
_getFields: function (item) {
return {
id: item.id,
name: item.name,
title: item.title || (item.description ? (item.name + '(' + item.description + ')') : item.name),
projectId: item.projectId,
format: item.format,
params: item.params,
__hide: item.__hide,
__datatype: item
};
},
_handleData: function () {
var source = [];
if (this.data.isRequiredField) {
source = this.data.requiredFieldSource;
} else if (this.data.isNormalParam) {
// HTTP 接口的请求头及响应头名称
if (this.data.key === 'name') {
source = this.data.iHeaderNames;
} else if (this.data.key === 'defaultValue'
&& this.data.param.name
&& this.data.param.name.toLowerCase() === 'content-type'
) {
source = this.data.iHeaderValues;
}
} else {
// 过滤掉匿名类型
source = this._source.filter(function (dt) {
return !dt.__isAnon;
});
var filterList = util._$filterVersion(source);
var _find = function (id) {
return filterList.find(function (item) {
return item.id == id;
});
};
source.forEach(function (item) {
if (!_find(item.id)) {
item.__hide = true;
}
});
var systemDataTypes = [], normalDataTypes = [];
source.forEach(function (dt) {
if (dt.id <= 10003) {
systemDataTypes.push(dt);
} else {
normalDataTypes.push(dt);
}
});
var sortFuncByName = function (itemA, itemB) {
return itemA.name.toLowerCase().localeCompare(itemB.name.toLowerCase(), 'zh-CN');
};
systemDataTypes.sort(sortFuncByName);
normalDataTypes.sort(sortFuncByName);
source = systemDataTypes.concat(normalDataTypes);
}
this.data.source = source;
},
_resetXlist: function () {
this.data.xlist = this.data.source.map(function (item, index) {
if (this.data.selected && this.data.selected.id === item.id) {
this.data.sIndex = index;
this.data.inputValue = item.name;
}
return this._getFields(item);
}, this);
},
toggle: function (evt) {
this.data.showDetail = false;
this.$emit('toggle');
if (!this.data.editable) {
// 不可编辑模式
return;
}
this.data.isOpen = !this.data.isOpen;
this.$refs.input.focus();
},
focusInput: function (evt) {
// 记住打开前的选中项, 关闭时做对比, 如果有变化就发出 change 事件
this.data.__oSelected = this.data.selected;
this.$emit('focus');
},
_addEventOnce: function () {
this._hideHandler = this._hideHandler || function (evt) {
if (this._selectingVersion ||
(evt && this.$refs && evt.target === this.$refs.input) ||
(evt && this.$refs && evt.target === this.$refs.trigon)
) {
return;
}
this.data.isOpen = false;
Regular.dom.off(document, 'click', this._hideHandler);
this.$update();
}.bind(this);
// 需要先移除事件, 不然反复点击时会重复触发
Regular.dom.off(document, 'click', this._hideHandler);
Regular.dom.on(document, 'click', this._hideHandler);
},
select: function (evt, item, index, versionCheck) {
if (item.disabled) {
return evt.event.stopPropagation();
}
if (versionCheck && this.checkVersion(item.id)) {
this._selectingVersion = true;
this.selectVersion(item);
} else {
this.data.isOpen = false;
this.data.selected = item;
this.data.sIndex = index;
this.data.inputValue = item.name;
}
},
input: function (evt) {
evt.event.preventDefault();
// 只要有输入动作, 就需要打开下拉框
this.data.isOpen = true;
this.data.sIndex = null;
this.data.selected = null;
this.matchXlist();
this.data.inputValue = this.$refs.input.value.trim();
},
// 版本选择弹框
selectVersion: function (dt) {
var that = this;
var getVersions = function (id) {
if (that.data.versionsMap[id]) {
return that.data.versionsMap[id];
}
for (var key in that.data.versionsMap) {
var found = that.data.versionsMap[key].find(function (item) {
return item.id == id;
});
if (found) {
return that.data.versionsMap[key];
}
}
};
var versions = getVersions(dt.id);
new VSModal({
data: {
versions: versions,
selected: dt.id
}
}).$on('ok', function (selected) {
var index;
(this._source || []).some(function (it, ind) {
if (it.id === selected) {
index = ind;
return true;
}
});
var item = this._source[index];
this._selectingVersion = false;
this.select(null, item, index, false);
}.bind(this)).$on('cancel', function () {
this._selectingVersion = false;
}.bind(this));
},
// 判断当前id是否具有相关历史版本
checkVersion: function (id) {
if (this.data.versionsMap[id]) {
return true;
}
for (var key in this.data.versionsMap) {
var found = this.data.versionsMap[key].find(function (item) {
return item.id == id;
});
if (found) {
return true;
}
}
return false;
},
matchXlist: function () {
var value = this.$refs.input.value.trim();
this.data.xlist = this.data.source.filter(function (item) {
delete item.__uiName;
if (!value) {
return true;
}
var iv = item.name;
var hitIndex = iv.toLowerCase().indexOf(value.toLowerCase());
if (hitIndex !== -1) {
item.__uiName = jstex.escapeHtml(iv.substr(0, hitIndex)) + '<b class="hl">' + jstex.escapeHtml(iv.substr(hitIndex, value.length)) + '</b>' + jstex.escapeHtml(iv.substr(hitIndex + value.length, iv.length - 1));
return true;
}
var ivpy = item.namePinyin;
var matchPinyinResult = util.highlightPinyin(iv, ivpy, value);
if (matchPinyinResult) {
item.__uiName = matchPinyinResult;
return true;
}
}, this);
this.checkXList();
},
keydown: function (evt) {
var keyCode = evt.event.keyCode;
if (keyCode === ENTER_KEY) {
evt.event.preventDefault();
this.data.isOpen = !this.data.isOpen;
if (!this.data.isOpen) {
if (this.data.sIndex) {
this.data.selected = this.data.xlist[this.data.sIndex];
} else {
var firstItem = this.data.xlist[0];
if (firstItem && firstItem.id) {
this.data.selected = firstItem;
} else {
// 没有可选项的时候, 选择 source 里面的第一个
this.data.selected = this.data.source[0];
// 更新自动提示列表, 以便再次打开时的状态保持一致
this.data.xlist = [this.data.selected];
}
this.data.inputValue = this.data.selected ? this.data.selected.name : this.data.inputValue;
}
this._hideHandler();
}
} else if (keyCode === ARROW_UP_KEY) {
evt.event.preventDefault();
if (!this.data.isOpen) {
this.data.isOpen = true;
}
if (this.data.sIndex === null) {
this.data.sIndex = this.data.xlist.length - 1;
} else {
this.data.sIndex -= 1;
if (this.data.sIndex < 0) {
this.data.sIndex = this.data.xlist.length - 1;
}
}
if (this.data.xlist[this.data.sIndex].id) {
this.data.selected = this.data.xlist[this.data.sIndex];
this.data.inputValue = this.data.selected.name;
this._setListScrollTop();
} else {
// 没有可选择的项
this.data.sIndex = null;
}
} else if (keyCode === ARROW_DOWN_KEY) {
evt.event.preventDefault();
if (!this.data.isOpen) {
this.data.isOpen = true;
}
if (this.data.sIndex === null) {
this.data.sIndex = 0;
} else {
this.data.sIndex += 1;
if (this.data.sIndex > this.data.xlist.length - 1) {
this.data.sIndex = 0;
}
}
if (this.data.xlist[this.data.sIndex].id) {
this.data.selected = this.data.xlist[this.data.sIndex];
this.data.inputValue = this.data.selected.name;
this._setListScrollTop();
} else {
// 没有可选择的项
this.data.sIndex = null;
}
}
},
checkXList: function () {
if (this.data.xlist.length === 0) {
this.data.xlist.push({
name: '没有可选择的项',
disabled: true
});
}
},
_setListScrollTop: function () {
setTimeout(function () {
if (this.$refs && this.$refs.listcon) {
if (this.data.sIndex > 5) {
this.$refs.listcon.scrollTop = (this.data.sIndex - 5) * 36;
} else {
this.$refs.listcon.scrollTop = 0;
}
}
}.bind(this), 0);
},
create: function (evt) {
this.$emit('create', this);
},
/**
* 选择某一项, 如果该项不在 source 中, 就将它添加到 source 中
* @param {Object} item - 需要选中的项
**/
$select: function (item) {
var index = null;
var found = this.data.source.find(function (it, idx) {
if (it.id === item.id) {
index = idx;
return true;
}
});
var oSelected = this.data.selected;
// 判断该项是否和已经选中的相同
if (found && this.data.selected && this.data.selected.id === found.id) {
return;
}
if (!found) {
// 如果列表中不存在, 就将它添加到列表中
this._source.push(item);
this._handleData();
this.data.selected = this.data.source.find(function (it, idx) {
if (it.id === item.id) {
index = idx;
return true;
}
});
} else {
this.data.selected = found;
}
this.data.sIndex = index;
this.data.inputValue = this.data.selected.name;
this.$update();
// 触发 change 事件
this.$emit('change', {
sender: this,
selected: this.data.selected,
oSelected: oSelected
});
this.data.sorted = false;
},
mouseWheel: function (e) {
// 滚动到底后,不让滚动页面
var delta = e.event.wheelDelta || -e.event.detail;
// 默认显示 7 项
var minusNum = 7;
if (this.data.isArrayElement) {
// 如果是数组元素的类型,因为 array 是用 css 隐藏的,这里再减去 1
minusNum = 8;
}
if (delta < 0 && this.$refs.listcon.scrollTop >= (this.data.source.length - minusNum) * 30
|| delta > 0 && this.$refs.listcon.scrollTop <= 0
) {
e.preventDefault();
}
},
isComplexType: function () {
if (this.data.isNormalParam) {
return false;
}
var selected = this.data.selected;
if (selected && (selected.id > 10003)) {
// 数据模型是自定义类型
return true;
}
},
checkJump: function (event) {
this.$emit('checkJump');
},
showDatatypeDetail: function (evt, show) {
if (this.data.isNormalParam || !this.data.selected || this.data.selected.id <= 10003) {
// 基本类型无需显示详情
return;
}
clearTimeout(this.mouseleaveTime);
var handler = function () {
this.data.showDetail = show;
var selected = this.data.selected;
this.data.xheaders = this.data.headers[selected.format];
if (!this.data.sorted) {
this.data.sorted = true;
this.data.params = (selected.params || []).sort(function (itemA, itemB) {
return itemA.position - itemB.position;
});
}
var selectedDT = (this.data.source || []).filter(function (dt) {
return dt.id === selected.id;
})[0];
if (selectedDT && selectedDT.version && selectedDT.version.name) {
this.data.versionName = selectedDT.version.name;
}
this.$update();
}.bind(this);
if (evt.type === 'mouseleave') {
this.mouseleaveTime = setTimeout(handler, 100);
} else {
handler();
}
},
getFormatName: function () {
var format = this.data.selected.format;
return (this.data.formats.find(function (f) {
return f.format === format;
}) || {}).name;
},
getFieldName: function (param, key) {
var selected = this.data.selected;
var result = '';
if (param.isArray && key === 'typeName') {
result += 'Array ';
}
if (param.type <= 10003 || key !== 'typeName') {
result += param[key];
} else {
// 自定义类型,如果没有 typeName,则是匿名类型,显示为 Object
result += '<a href="' + this.getDatatypeDetailLink(this.data.docPreview, selected.projectId, selected.id) + '" class="stateful" title="' + param[key] + '">' + (param[key] || 'Object') + '</a>';
}
return result;
},
getDatatypeDetailLink: util._$getDatatypeDetailLink
});
return ParamSelect;
});
|
josehu07/SplitFS
|
kernel/linux-5.4/net/vmw_vsock/vsock_addr.c
|
<gh_stars>10-100
// SPDX-License-Identifier: GPL-2.0-only
/*
* VMware vSockets Driver
*
* Copyright (C) 2007-2012 VMware, Inc. All rights reserved.
*/
#include <linux/types.h>
#include <linux/socket.h>
#include <linux/stddef.h>
#include <net/sock.h>
#include <net/vsock_addr.h>
/* Initialize @addr as an AF_VSOCK address with the given CID and port.
 * memset() (rather than per-member assignment) also clears svm_zero and any
 * struct padding, keeping the address safe to copy to user space.
 */
void vsock_addr_init(struct sockaddr_vm *addr, u32 cid, u32 port)
{
	memset(addr, 0, sizeof(*addr));
	addr->svm_family = AF_VSOCK;
	addr->svm_cid = cid;
	addr->svm_port = port;
}
EXPORT_SYMBOL_GPL(vsock_addr_init);
/* Sanity-check a vsock address: non-NULL, AF_VSOCK family, and the
 * reserved svm_zero area untouched. Returns 0 or a negative errno.
 */
int vsock_addr_validate(const struct sockaddr_vm *addr)
{
	int err = 0;

	if (!addr)
		err = -EFAULT;
	else if (addr->svm_family != AF_VSOCK)
		err = -EAFNOSUPPORT;
	else if (addr->svm_zero[0] != 0)
		err = -EINVAL;

	return err;
}
EXPORT_SYMBOL_GPL(vsock_addr_validate);
/* True when @addr has a concrete port, i.e. is not VMADDR_PORT_ANY. */
bool vsock_addr_bound(const struct sockaddr_vm *addr)
{
	return addr->svm_port != VMADDR_PORT_ANY;
}
EXPORT_SYMBOL_GPL(vsock_addr_bound);
/* Reset @addr to the wildcard (unbound) address: any CID, any port. */
void vsock_addr_unbind(struct sockaddr_vm *addr)
{
	vsock_addr_init(addr, VMADDR_CID_ANY, VMADDR_PORT_ANY);
}
EXPORT_SYMBOL_GPL(vsock_addr_unbind);
/* Two vsock addresses are equal iff both CID and port match. */
bool vsock_addr_equals_addr(const struct sockaddr_vm *addr,
			    const struct sockaddr_vm *other)
{
	return addr->svm_cid == other->svm_cid &&
	       addr->svm_port == other->svm_port;
}
EXPORT_SYMBOL_GPL(vsock_addr_equals_addr);
/* Validate a generic sockaddr of @len bytes and view it as a sockaddr_vm.
 * On success *out_addr aliases @addr (no copy is made; the cast drops the
 * const qualifier). Returns 0 or a negative errno from validation.
 */
int vsock_addr_cast(const struct sockaddr *addr,
		    size_t len, struct sockaddr_vm **out_addr)
{
	if (len < sizeof(**out_addr))
		return -EFAULT;

	*out_addr = (struct sockaddr_vm *)addr;

	return vsock_addr_validate(*out_addr);
}
EXPORT_SYMBOL_GPL(vsock_addr_cast);
|
phoenix-scitent/spree
|
promo/app/overrides/promo_coupon_code_field.rb
|
# Replaces Spree's checkout coupon-code field with the promo engine's partial.
# The :original digest lets Deface warn if the upstream template markup changes.
Deface::Override.new(:virtual_path => "spree/checkout/_payment",
                     :name => "promo_coupon_code_field",
                     :replace => "[data-hook='coupon_code_field'], #coupon_code_field[data-hook]",
                     :partial => "spree/checkout/coupon_code_field",
                     :disabled => false,
                     :original => '9c9f7058eb6fd9236a241621ab53b43e1caa1a0b' )
|
pothosware/pothos-serialization
|
include/Pothos/serialization/impl/mpl/map/aux_/preprocessed/plain/map30.hpp
|
// Copyright <NAME> 2000-2004
// Copyright <NAME> 2003-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// Preprocessed version of "boost/mpl/map/map30.hpp" header
// -- DO NOT modify by hand!
namespace Pothos { namespace mpl {

// Preprocessed m_at / m_item specializations and mapN templates for
// map sizes 21..30 (generated — do not edit by hand).

template< typename Map>
struct m_at< Map,20 >
{
    typedef typename Map::item20 type;
};

template< typename Key, typename T, typename Base >
struct m_item< 21,Key,T,Base >
    : m_item_< Key,T,Base >
{
    typedef pair< Key,T > item20;
};

template<
      typename P0, typename P1, typename P2, typename P3, typename P4
    , typename P5, typename P6, typename P7, typename P8, typename P9
    , typename P10, typename P11, typename P12, typename P13, typename P14
    , typename P15, typename P16, typename P17, typename P18, typename P19
    , typename P20
    >
struct map21
    : m_item<
          21
        , typename P20::first
        , typename P20::second
        , map20< P0,P1,P2,P3,P4,P5,P6,P7,P8,P9,P10,P11,P12,P13,P14,P15,P16,P17,P18,P19 >
        >
{
    typedef map21 type;
};

template< typename Map>
struct m_at< Map,21 >
{
    typedef typename Map::item21 type;
};

template< typename Key, typename T, typename Base >
struct m_item< 22,Key,T,Base >
    : m_item_< Key,T,Base >
{
    typedef pair< Key,T > item21;
};

template<
      typename P0, typename P1, typename P2, typename P3, typename P4
    , typename P5, typename P6, typename P7, typename P8, typename P9
    , typename P10, typename P11, typename P12, typename P13, typename P14
    , typename P15, typename P16, typename P17, typename P18, typename P19
    , typename P20, typename P21
    >
struct map22
    : m_item<
          22
        , typename P21::first
        , typename P21::second
        , map21< P0,P1,P2,P3,P4,P5,P6,P7,P8,P9,P10,P11,P12,P13,P14,P15,P16,P17,P18,P19,P20 >
        >
{
    typedef map22 type;
};

template< typename Map>
struct m_at< Map,22 >
{
    typedef typename Map::item22 type;
};

template< typename Key, typename T, typename Base >
struct m_item< 23,Key,T,Base >
    : m_item_< Key,T,Base >
{
    typedef pair< Key,T > item22;
};

template<
      typename P0, typename P1, typename P2, typename P3, typename P4
    , typename P5, typename P6, typename P7, typename P8, typename P9
    , typename P10, typename P11, typename P12, typename P13, typename P14
    , typename P15, typename P16, typename P17, typename P18, typename P19
    , typename P20, typename P21, typename P22
    >
struct map23
    : m_item<
          23
        , typename P22::first
        , typename P22::second
        , map22< P0,P1,P2,P3,P4,P5,P6,P7,P8,P9,P10,P11,P12,P13,P14,P15,P16,P17,P18,P19,P20,P21 >
        >
{
    typedef map23 type;
};

template< typename Map>
struct m_at< Map,23 >
{
    typedef typename Map::item23 type;
};

template< typename Key, typename T, typename Base >
struct m_item< 24,Key,T,Base >
    : m_item_< Key,T,Base >
{
    typedef pair< Key,T > item23;
};

template<
      typename P0, typename P1, typename P2, typename P3, typename P4
    , typename P5, typename P6, typename P7, typename P8, typename P9
    , typename P10, typename P11, typename P12, typename P13, typename P14
    , typename P15, typename P16, typename P17, typename P18, typename P19
    , typename P20, typename P21, typename P22, typename P23
    >
struct map24
    : m_item<
          24
        , typename P23::first
        , typename P23::second
        , map23< P0,P1,P2,P3,P4,P5,P6,P7,P8,P9,P10,P11,P12,P13,P14,P15,P16,P17,P18,P19,P20,P21,P22 >
        >
{
    typedef map24 type;
};

template< typename Map>
struct m_at< Map,24 >
{
    typedef typename Map::item24 type;
};

template< typename Key, typename T, typename Base >
struct m_item< 25,Key,T,Base >
    : m_item_< Key,T,Base >
{
    typedef pair< Key,T > item24;
};

template<
      typename P0, typename P1, typename P2, typename P3, typename P4
    , typename P5, typename P6, typename P7, typename P8, typename P9
    , typename P10, typename P11, typename P12, typename P13, typename P14
    , typename P15, typename P16, typename P17, typename P18, typename P19
    , typename P20, typename P21, typename P22, typename P23, typename P24
    >
struct map25
    : m_item<
          25
        , typename P24::first
        , typename P24::second
        , map24< P0,P1,P2,P3,P4,P5,P6,P7,P8,P9,P10,P11,P12,P13,P14,P15,P16,P17,P18,P19,P20,P21,P22,P23 >
        >
{
    typedef map25 type;
};

template< typename Map>
struct m_at< Map,25 >
{
    typedef typename Map::item25 type;
};

template< typename Key, typename T, typename Base >
struct m_item< 26,Key,T,Base >
    : m_item_< Key,T,Base >
{
    typedef pair< Key,T > item25;
};

template<
      typename P0, typename P1, typename P2, typename P3, typename P4
    , typename P5, typename P6, typename P7, typename P8, typename P9
    , typename P10, typename P11, typename P12, typename P13, typename P14
    , typename P15, typename P16, typename P17, typename P18, typename P19
    , typename P20, typename P21, typename P22, typename P23, typename P24
    , typename P25
    >
struct map26
    : m_item<
          26
        , typename P25::first
        , typename P25::second
        , map25< P0,P1,P2,P3,P4,P5,P6,P7,P8,P9,P10,P11,P12,P13,P14,P15,P16,P17,P18,P19,P20,P21,P22,P23,P24 >
        >
{
    typedef map26 type;
};

template< typename Map>
struct m_at< Map,26 >
{
    typedef typename Map::item26 type;
};

template< typename Key, typename T, typename Base >
struct m_item< 27,Key,T,Base >
    : m_item_< Key,T,Base >
{
    typedef pair< Key,T > item26;
};

template<
      typename P0, typename P1, typename P2, typename P3, typename P4
    , typename P5, typename P6, typename P7, typename P8, typename P9
    , typename P10, typename P11, typename P12, typename P13, typename P14
    , typename P15, typename P16, typename P17, typename P18, typename P19
    , typename P20, typename P21, typename P22, typename P23, typename P24
    , typename P25, typename P26
    >
struct map27
    : m_item<
          27
        , typename P26::first
        , typename P26::second
        , map26< P0,P1,P2,P3,P4,P5,P6,P7,P8,P9,P10,P11,P12,P13,P14,P15,P16,P17,P18,P19,P20,P21,P22,P23,P24,P25 >
        >
{
    typedef map27 type;
};

template< typename Map>
struct m_at< Map,27 >
{
    typedef typename Map::item27 type;
};

template< typename Key, typename T, typename Base >
struct m_item< 28,Key,T,Base >
    : m_item_< Key,T,Base >
{
    typedef pair< Key,T > item27;
};

template<
      typename P0, typename P1, typename P2, typename P3, typename P4
    , typename P5, typename P6, typename P7, typename P8, typename P9
    , typename P10, typename P11, typename P12, typename P13, typename P14
    , typename P15, typename P16, typename P17, typename P18, typename P19
    , typename P20, typename P21, typename P22, typename P23, typename P24
    , typename P25, typename P26, typename P27
    >
struct map28
    : m_item<
          28
        , typename P27::first
        , typename P27::second
        , map27< P0,P1,P2,P3,P4,P5,P6,P7,P8,P9,P10,P11,P12,P13,P14,P15,P16,P17,P18,P19,P20,P21,P22,P23,P24,P25,P26 >
        >
{
    typedef map28 type;
};

template< typename Map>
struct m_at< Map,28 >
{
    typedef typename Map::item28 type;
};

template< typename Key, typename T, typename Base >
struct m_item< 29,Key,T,Base >
    : m_item_< Key,T,Base >
{
    typedef pair< Key,T > item28;
};

template<
      typename P0, typename P1, typename P2, typename P3, typename P4
    , typename P5, typename P6, typename P7, typename P8, typename P9
    , typename P10, typename P11, typename P12, typename P13, typename P14
    , typename P15, typename P16, typename P17, typename P18, typename P19
    , typename P20, typename P21, typename P22, typename P23, typename P24
    , typename P25, typename P26, typename P27, typename P28
    >
struct map29
    : m_item<
          29
        , typename P28::first
        , typename P28::second
        , map28< P0,P1,P2,P3,P4,P5,P6,P7,P8,P9,P10,P11,P12,P13,P14,P15,P16,P17,P18,P19,P20,P21,P22,P23,P24,P25,P26,P27 >
        >
{
    typedef map29 type;
};

template< typename Map>
struct m_at< Map,29 >
{
    typedef typename Map::item29 type;
};

template< typename Key, typename T, typename Base >
struct m_item< 30,Key,T,Base >
    : m_item_< Key,T,Base >
{
    typedef pair< Key,T > item29;
};

template<
      typename P0, typename P1, typename P2, typename P3, typename P4
    , typename P5, typename P6, typename P7, typename P8, typename P9
    , typename P10, typename P11, typename P12, typename P13, typename P14
    , typename P15, typename P16, typename P17, typename P18, typename P19
    , typename P20, typename P21, typename P22, typename P23, typename P24
    , typename P25, typename P26, typename P27, typename P28, typename P29
    >
struct map30
    : m_item<
          30
        , typename P29::first
        , typename P29::second
        , map29< P0,P1,P2,P3,P4,P5,P6,P7,P8,P9,P10,P11,P12,P13,P14,P15,P16,P17,P18,P19,P20,P21,P22,P23,P24,P25,P26,P27,P28 >
        >
{
    typedef map30 type;
};

}}
|
juitem/ONE
|
compiler/luci/partition/src/Partition.cpp
|
<reponame>juitem/ONE<filename>compiler/luci/partition/src/Partition.cpp
/*
* Copyright (c) 2021 Samsung Electronics Co., Ltd. All Rights Reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "PartitionIR.h"
#include "PartitionIRDump.h"
#include "PartitionPGroups.h"
#include "PartitionMerge.h"
#include "PartitionCleanup.h"
#include "PartitionPModules.h"
#include "PartitionPModulesDump.h"
#include "luci/Partition.h"
#include "luci/Log.h"
#include <cassert>
namespace luci
{

/**
 * @brief Apply the partition table to the source module and return the
 *        resulting partitioned modules.
 *
 * Pipeline: build per-group partition graphs, merge mergeable groups,
 * drop inputs/outputs that became unused, then materialize one Module
 * per group. Each INFO dump shows the intermediate partition graph.
 */
PartedModules apply(Module *source, const PartitionTable &partition)
{
  assert(source != nullptr);

  LOGGER(l);

  auto pgroups = produce_pgroups(source, partition);
  INFO(l) << "--- Partition Graph (1)------------------------";
  INFO(l) << pgroups.get();

  auto mpgroups = merge_pgroups(pgroups.get());
  INFO(l) << "--- Partition Graph (2)------------------------";
  INFO(l) << mpgroups.get();

  remove_unused_inputoutputs(mpgroups.get(), source);
  INFO(l) << "--- Partition Graph (3)------------------------";
  INFO(l) << mpgroups.get();

  auto pmodules = produce_pmodules(mpgroups.get());
  INFO(l) << "--- Modules -----------------------------------";
  INFO(l) << &pmodules;

  return pmodules;
}

} // namespace luci
|
jonsmock/org.alloytools.alloy
|
org.alloytools.kodkod.core/src/main/java/kodkod/util/collections/IdentityHashSet.java
|
<reponame>jonsmock/org.alloytools.alloy<filename>org.alloytools.kodkod.core/src/main/java/kodkod/util/collections/IdentityHashSet.java
/*
* Kodkod -- Copyright (c) 2005-present, <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package kodkod.util.collections;
import java.util.AbstractSet;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* <p>
* Implements the <tt>Set</tt> interface with a hash table, using
* reference-equality in place of object-equality when comparing elements. In
* other words, in an <tt>IdentityHashSet</tt>, two elements <tt>e1</tt> and
* <tt>e2</tt> are considered equal if and only if <tt>(e1==e2)</tt>. (In normal
* <tt>Set</tt> implementations (like <tt>Set</tt>) two elements <tt>e1</tt> and
* <tt>e2</tt> are considered equal if and only if
* <tt>(e1==null ? e2==null : e1.equals(e2))</tt>.)
* <p>
* <b>This class is <i>not</i> a general-purpose <tt>Set</tt> implementation!
* While this class implements the <tt>Set</tt> interface, it intentionally
* violates <tt>Set's</tt> general contract, which mandates the use of the
* <tt>equals</tt> method when comparing objects. This class is designed for use
* only in the rare cases wherein reference-equality semantics are required.</b>
* <p>
* This class provides all of the optional set operations, and permits
* <tt>null</tt> elements. This class makes no guarantees as to the order of the
* set; in particular, it does not guarantee that the order will remain constant
* over time.
* <p>
* This class provides constant-time performance for the basic operations
* (<tt>get</tt> and <tt>put</tt>), assuming the system identity hash function
* ({@link System#identityHashCode(Object)}) disperses elements properly among
* the buckets.
* <p>
* This class has one tuning parameter (which affects performance but not
* semantics): <i>expected maximum size</i>. This parameter is the maximum
* number of elements that the set is expected to hold. Internally, this
* parameter is used to determine the number of buckets initially comprising the
* hash table. The precise relationship between the expected maximum size and
* the number of buckets is unspecified.
* <p>
* If the size of the set sufficiently exceeds the expected maximum size, the
 * number of buckets is increased. Increasing the number of buckets ("rehashing")
* may be fairly expensive, so it pays to create identity hash sets with a
* sufficiently large expected maximum size. On the other hand, iteration
* requires time proportional to the number of buckets in the hash table, so it
* pays not to set the expected maximum size too high if you are especially
* concerned with iteration performance or memory usage.
* <p>
* <b>Note that this implementation is not synchronized.</b> The iterators
 * returned by this class's <tt>iterator</tt> method are <i>not fail-fast</i>: in the face of
* concurrent modification, the iterator risks arbitrary, non-deterministic
* behavior at an undetermined time in the future.
* <p>
* Implementation note: This is a simple <i>linear-probe</i> hash table, as
* described for example in texts by Sedgewick and Knuth. For many JRE
* implementations and operation mixes, this class will yield better performance
* than {@link HashSet} (which uses <i>chaining</i> rather than linear-probing).
*
* @specfield elems: set T
* @author <NAME>
*/
public final class IdentityHashSet<T> extends AbstractSet<T> {
    /* implementation adapted from java.util.IdentityHashMap */

    /**
     * The minimum capacity, used if a lower value is implicitly specified by
     * either of the constructors with arguments. The value 4 corresponds to an
     * expected maximum size of 2, given a load factor of 2/3. MUST be a power
     * of two.
     */
    private static final int MINIMUM_CAPACITY = 4;

    /**
     * The maximum capacity, used if a higher value is implicitly specified by
     * either of the constructors with arguments. MUST be a power of two <=
     * 1<<29.
     */
    private static final int MAXIMUM_CAPACITY = 1 << 29;

    /**
     * Sentinel stored in the table in place of a null element, so that
     * "empty slot" (null) stays distinguishable from "contains null".
     */
    private static final Object NULL = new Object();

    /**
     * Use NULL for key if it is null.
     */
    private static Object maskNull(Object o) {
        return (o == null ? NULL : o);
    }

    /**
     * Return internal representation of null key back to caller as null
     */
    private static Object unmaskNull(Object o) {
        return (o == NULL ? null : o);
    }

    /**
     * The table, resized as necessary. Length MUST always be a power of two.
     */
    private Object[] table;

    /**
     * The number of elements contained in this identity hash set.
     */
    private int size;

    /**
     * The next size value at which to resize (capacity * load factor).
     */
    private int threshold;
    /**
     * Constructs a new, empty identity hash set with a default expected maximum
     * size of 16.
     *
     * @ensures no this.elems'
     */
    public IdentityHashSet() {
        this(16);
    }
    /**
     * Constructs a new, empty set with the specified expected maximum size.
     * Putting more than the expected number of elements into the set may cause
     * the internal data structure to grow, which may be somewhat
     * time-consuming.
     *
     * @ensures no this.elems'
     * @throws IllegalArgumentException <tt>expectedMaxSize</tt> < 0
     */
    public IdentityHashSet(int expectedMaxSize) {
        if (expectedMaxSize < 0)
            throw new IllegalArgumentException("expectedMaxSize < 0: " + expectedMaxSize);
        final int initCapacity = capacity(expectedMaxSize);
        // Load factor of 2/3: resize once size reaches two thirds of capacity.
        threshold = (initCapacity * 2) / 3;
        table = new Object[initCapacity];
        size = 0;
    }
    /**
     * Constructs a new identity hash set containing the elements in the
     * specified collection.
     *
     * @ensures this.elems' = c.elems
     * @throws NullPointerException c = null
     */
    public IdentityHashSet(Collection< ? extends T> c) {
        // Allow for a bit of growth
        this((int) ((1 + c.size()) * 1.1));
        addAll(c);
    }
/**
* Returns the appropriate capacity for the specified expected maximum size.
* Returns the smallest power of two between MINIMUM_CAPACITY and
* MAXIMUM_CAPACITY, inclusive, that is greater than (3 *
* expectedMaxSize)/2, if such a number exists. Otherwise returns
* MAXIMUM_CAPACITY. If (3 * expectedMaxSize)/2 is negative, it is assumed
* that overflow has occurred, and MAXIMUM_CAPACITY is returned.
*/
private static int capacity(int expectedMaxSize) {
// Compute min capacity for expectedMaxSize given a load factor of 2/3
final int minCapacity = (3 * expectedMaxSize) / 2;
// Compute the appropriate capacity
int result;
if (minCapacity > MAXIMUM_CAPACITY || minCapacity < 0) {
result = MAXIMUM_CAPACITY;
} else {
result = StrictMath.max(MINIMUM_CAPACITY, Integer.highestOneBit(minCapacity));
if (result < minCapacity)
result <<= 1;
}
return result;
}
    /**
     * Returns an iterator over the elements of this set. The iterator is
     * <i>not</i> fail-fast under concurrent modification.
     */
    @Override
    public Iterator<T> iterator() {
        return new IdentityIterator();
    }
    /**
     * Returns the number of elements in this set.
     */
    @Override
    public int size() {
        return size;
    }
/**
 * Returns whether this set contains no elements.
 */
@Override
public boolean isEmpty() {
    return this.size == 0;
}
/**
 * Returns the bucket index for Object x in a table of the given length.
 * The length is always a power of two (see capacity/resize), so the
 * bitwise AND reduces the identity hash code modulo the table size.
 */
private static int hash(Object x, int length) {
    return System.identityHashCode(x) & (length - 1);
}
/**
 * Advances a probe index circularly through a table of the given
 * (power-of-two) length. The odd stride of 3 is coprime with the table
 * size, so successive probes eventually visit every slot.
 **/
private static int nextKeyIndex(int i, int length) {
    return (i + 3) & (length - 1);
}
/**
 * Tests whether the specified object reference (compared by identity, not
 * equals()) is an element of this set.
 *
 * @return o in this.elems
 */
@Override
public boolean contains(Object o) {
    final Object key = maskNull(o);
    int i = hash(key, table.length);
    while (true) {
        final Object slot = table[i];
        if (slot == key) {
            return true;
        }
        if (slot == null) {
            return false; // end of the probe run: not present
        }
        i = nextKeyIndex(i, table.length);
    }
}
/**
 * Adds the given element (by reference identity) if it is not already
 * present.
 *
 * @return true if the set changed as a result of this call
 */
@Override
public boolean add(T element) {
    final Object key = maskNull(element);
    int i = hash(key, table.length);
    Object slot;
    while ((slot = table[i]) != null) {
        if (slot == key) {
            return false; // identical reference already stored
        }
        i = nextKeyIndex(i, table.length);
    }
    table[i] = key;
    // Double the table once the 2/3 load factor is reached, to keep
    // probe runs short.
    if (++size >= threshold) {
        resize(table.length << 1);
    }
    return true;
}
/**
 * Resize the table to hold given capacity. The new capacity must be a power
 * of two. Every element is rehashed, because probe positions depend on the
 * table length.
 */
private void resize(int newCapacity) {
    final int oldLength = table.length;
    if (oldLength == MAXIMUM_CAPACITY) {// can't expand any further
        if (threshold == MAXIMUM_CAPACITY - 1)
            throw new IllegalStateException("Capacity exhausted.");
        // Allow the maximal table to fill almost completely before failing.
        threshold = MAXIMUM_CAPACITY - 1;
        return;
    }
    if (oldLength >= newCapacity)
        return; // already large enough; nothing to do
    final Object[] newTable = new Object[newCapacity];
    for (int j = 0; j < oldLength; j++) {
        Object o = table[j];
        if (o != null) {
            table[j] = null;
            // Re-probe against the new capacity.
            int i = hash(o, newCapacity);
            while (newTable[i] != null)
                i = nextKeyIndex(i, newCapacity);
            newTable[i] = o;
        }
    }
    table = newTable;
    threshold = (newCapacity * 2) / 3; // maintain the 2/3 load factor
}
/**
 * Removes the given object reference (identity comparison) from this set,
 * if present.
 *
 * @return true if the set changed as a result of this call
 */
@Override
public boolean remove(Object o) {
    final Object key = maskNull(o);
    int i = hash(key, table.length);
    while (true) {
        final Object slot = table[i];
        if (slot == key) {
            size--;
            table[i] = null;
            closeDeletion(i); // re-pack the probe run that followed the hole
            return true;
        }
        if (slot == null) {
            return false; // end of probe run: nothing to remove
        }
        i = nextKeyIndex(i, table.length);
    }
}
/**
 * Rehash all possibly-colliding entries following a deletion. This
 * preserves the linear-probe collision properties required by get, put,
 * etc.: a lookup must never hit a null slot before reaching its element.
 *
 * @param d the index of a newly empty deleted slot
 */
private void closeDeletion(int d) {
    // Adapted from Knuth Section 6.4 Algorithm R
    // Look for items to swap into newly vacated slot
    // starting at index immediately following deletion,
    // and continuing until a null slot is seen, indicating
    // the end of a run of possibly-colliding keys.
    Object item;
    for (int i = nextKeyIndex(d, table.length); (item = table[i]) != null; i = nextKeyIndex(i, table.length)) {
        // The following test triggers if the item at slot i (which
        // hashes to be at slot r) should take the spot vacated by d.
        // If so, we swap it in, and then continue with d now at the
        // newly vacated i. This process will terminate when we hit
        // the null slot at the end of this run.
        // The test is messy because we are using a circular table.
        int r = hash(item, table.length);
        if ((i < r && (r <= d || d <= i)) || (r <= d && d <= i)) {
            table[d] = item;
            table[i] = null;
            d = i;
        }
    }
}
/**
 * Adds all elements of the given collection (by reference identity).
 *
 * @return true if the set changed as a result of this call
 */
@Override
public boolean addAll(Collection< ? extends T> c) {
    final int incoming = c.size();
    if (incoming == 0) {
        return false;
    }
    // Conservatively pre-expand so the per-element adds do not trigger
    // repeated resizes.
    if (incoming > threshold) {
        resize(capacity(incoming));
    }
    return super.addAll(c);
}
/**
 * Removes every element of this set that is contained in the given
 * collection.
 *
 * @return true if the set changed as a result of this call
 */
@Override
public boolean removeAll(Collection< ? > c) {
    /*
     * Deliberately reverts from AbstractSet's implementation to
     * AbstractCollection's: the former contains an optimization that
     * misbehaves when c is a smaller "normal" (non-identity-based) Set.
     */
    boolean changed = false;
    for (Iterator< ? > it = iterator(); it.hasNext();) {
        if (c.contains(it.next())) {
            it.remove();
            changed = true;
        }
    }
    return changed;
}
/**
 * Removes every element from this set. Capacity is retained.
 */
@Override
public void clear() {
    java.util.Arrays.fill(table, null);
    size = 0;
}
/**
 * Compares the specified object with this set for equality: <tt>true</tt>
 * iff the given object is also a set and the two contain identical
 * object references.
 * <p>
 * <b>Owing to the reference-equality-based semantics of this set, the
 * symmetry and transitivity requirements of the <tt>Object.equals</tt>
 * contract may be violated when comparing against a normal set. The
 * contract is guaranteed to hold among <tt>IdentityHashSet</tt>
 * instances.</b>
 *
 * @return <tt>true</tt> if the specified object is equal to this set.
 * @see Object#equals(Object)
 */
public boolean equals(Object o) {
    if (o == this) {
        return true;
    }
    if (!(o instanceof IdentityHashSet)) {
        return super.equals(o); // fall back to AbstractSet semantics
    }
    final IdentityHashSet< ? > other = (IdentityHashSet< ? >) o;
    if (other.size() != size) {
        return false;
    }
    // Every (masked) element stored in the other table must be present here.
    for (final Object k : other.table) {
        if (k != null && !contains(k)) {
            return false;
        }
    }
    return true;
}
/**
 * Returns the hash code value for this set: the sum of the identity hash
 * codes of its elements. This guarantees <tt>t1.equals(t2)</tt> implies
 * <tt>t1.hashCode()==t2.hashCode()</tt> for any two
 * <tt>IdentityHashSet</tt> instances <tt>t1</tt> and <tt>t2</tt>, as
 * required by {@link Object#hashCode()}.
 * <p>
 * <b>Owing to the reference-equality-based semantics of the elements,
 * this contractual requirement may be violated when one of the two
 * objects compared is an <tt>IdentityHashSet</tt> and the other a normal
 * set.</b>
 *
 * @return the hash code value for this set.
 * @see Object#hashCode()
 * @see #equals(Object)
 */
public int hashCode() {
    int h = 0;
    for (int i = 0; i < table.length; i++) {
        final Object o = table[i];
        if (o != null) {
            h += System.identityHashCode(unmaskNull(o));
        }
    }
    return h;
}
/**
 * An iterator over the elements of an IdentityHashSet.
 */
private final class IdentityIterator implements Iterator<T> {
    // Current slot; starts past the end when the set is empty.
    int index = (size != 0 ? 0 : table.length);
    // Slot of the element last returned by next(); -1 means remove()
    // is not currently allowed.
    int lastReturnedIndex = -1;
    // Reference to the main table, or to a partial copy when remove()
    // had to preserve not-yet-visited elements (see remove() below).
    Object[] traversalTable = table;

    public boolean hasNext() {
        // Advance index to the next occupied slot, if any.
        for (int i = index; i < traversalTable.length; i++) {
            if (traversalTable[i] != null) {
                index = i;
                return true;
            }
        }
        index = traversalTable.length;
        return false;
    }

    @SuppressWarnings("unchecked")
    public T next() {
        if (!hasNext())
            throw new NoSuchElementException();
        lastReturnedIndex = index++;
        return (T) unmaskNull(traversalTable[lastReturnedIndex]);
    }

    public void remove() {
        if (lastReturnedIndex == -1)
            throw new IllegalStateException();
        final int deletedSlot = lastReturnedIndex;
        lastReturnedIndex = -1;
        // If traversing a copy, remove in real table.
        // We can skip gap-closure on copy.
        if (traversalTable != IdentityHashSet.this.table) {
            IdentityHashSet.this.remove(traversalTable[deletedSlot]);
            traversalTable[deletedSlot] = null;
        } else { // we are working on the real table...
            // back up index to revisit new contents after deletion
            size--;
            index = deletedSlot;
            // Removal code proceeds as in closeDeletion except that
            // it must catch the rare case where an element already
            // seen is swapped into a vacant slot that will be later
            // traversed by this iterator. We cannot allow future
            // next() calls to return it again. The likelihood of
            // this occurring under 2/3 load factor is very slim, but
            // when it does happen, we must make a copy of the rest of
            // the table to use for the rest of the traversal. Since
            // this can only happen when we are near the end of the table,
            // even in these rare cases, this is not very expensive in
            // time or space.
            final Object[] tab = traversalTable;
            final int length = tab.length;
            int d = deletedSlot;
            tab[d] = null; // vacate the slot
            Object item;
            for (int i = nextKeyIndex(d, length); (item = tab[i]) != null; i = nextKeyIndex(i, length)) {
                int r = hash(item, length);
                // See closeDeletion for explanation of this conditional
                if ((i < r && (r <= d || d <= i)) || (r <= d && d <= i)) {
                    // If we are about to swap an already-seen element
                    // into a slot that may later be returned by next(),
                    // then clone the rest of table for use in future
                    // next() calls. It is OK that our copy will have
                    // a gap in the "wrong" place, since it will never
                    // be used for searching anyway.
                    if (i < deletedSlot && d >= deletedSlot && traversalTable == IdentityHashSet.this.table) {
                        int remaining = length - deletedSlot;
                        Object[] newTable = new Object[remaining];
                        System.arraycopy(tab, deletedSlot, newTable, 0, remaining);
                        traversalTable = newTable;
                        index = 0;
                    }
                    tab[d] = item;
                    tab[i] = null;
                    d = i;
                }
            }
        }
    }
}
// public static void main(String[] args) {
// IdentityHashSet<Integer> s = new IdentityHashSet<Integer>(21);
// Integer[] elts = new Integer[21];
// for(int i = 0; i < elts.length; i++) {
// elts[i] = new Integer(i);
// s.add(elts[i]);
// }
// System.out.println(s);
// System.out.println(s.size());
// System.out.println(s.table.length);
// System.out.println(s.threshold);
// System.out.println(s.contains(2));
// System.out.println(s.contains(elts[2]));
// System.out.println(s.remove(new Integer(0)));
// System.out.println(s.remove(elts[0]));
// System.out.println(s);
//
// for(Iterator<Integer> iter = s.iterator(); iter.hasNext(); ) {
// System.out.println(iter.next());
// iter.remove();
// }
// System.out.println(s);
//
// }
}
|
josepfo/lamassu-server
|
new-lamassu-admin/src/pages/Analytics/graphs/OverTimeDotGraph.js
|
<reponame>josepfo/lamassu-server
import BigNumber from 'bignumber.js'
import * as d3 from 'd3'
import { getTimezoneOffset } from 'date-fns-tz'
import { add, format, startOfWeek, startOfYear } from 'date-fns/fp'
import * as R from 'ramda'
import React, { memo, useCallback, useEffect, useMemo, useRef } from 'react'
import {
java,
neon,
subheaderDarkColor,
offColor,
fontColor,
primaryColor,
fontSecondary,
subheaderColor
} from 'src/styling/variables'
import { MINUTE, DAY, WEEK, MONTH } from 'src/utils/time'
// Scatter plot of transactions over time, rendered with raw d3 into an <svg>.
// Hover-selection state (interval, its transactions, popover coordinates) is
// lifted to the parent through the setSelection* callbacks.
const Graph = ({
  data,
  period,
  timezone,
  setSelectionCoords,
  setSelectionData,
  setSelectionDateInterval
}) => {
  const ref = useRef(null)

  // Fixed layout constants; the SVG scales through its viewBox.
  const GRAPH_POPOVER_WIDTH = 150
  const GRAPH_POPOVER_MARGIN = 25
  const GRAPH_HEIGHT = 401
  const GRAPH_WIDTH = 1163
  const GRAPH_MARGIN = useMemo(
    () => ({
      top: 25,
      right: 0.5,
      bottom: 27,
      left: 36.5
    }),
    []
  )

  // Timestamps are shifted by the configured timezone's UTC offset so the
  // d3 UTC scales display that timezone's wall-clock time.
  const offset = getTimezoneOffset(timezone)
  const NOW = Date.now() + offset

  // Visible time window for each selectable period.
  const periodDomains = {
    day: [NOW - DAY, NOW],
    week: [NOW - WEEK, NOW],
    month: [NOW - MONTH, NOW]
  }
const dataPoints = useMemo(
() => ({
day: {
freq: 24,
step: 60 * 60 * 1000,
tick: d3.utcHour.every(1),
labelFormat: '%H:%M'
},
week: {
freq: 7,
step: 24 * 60 * 60 * 1000,
tick: d3.utcDay.every(1),
labelFormat: '%a %d'
},
month: {
freq: 30,
step: 24 * 60 * 60 * 1000,
tick: d3.utcDay.every(1),
labelFormat: '%d'
}
}),
[]
)
const getPastAndCurrentDayLabels = useCallback(d => {
const currentDate = new Date(d)
const currentDateDay = currentDate.getUTCDate()
const currentDateWeekday = currentDate.getUTCDay()
const currentDateMonth = currentDate.getUTCMonth()
const previousDate = new Date(currentDate.getTime())
previousDate.setUTCDate(currentDateDay - 1)
const previousDateDay = previousDate.getUTCDate()
const previousDateWeekday = previousDate.getUTCDay()
const previousDateMonth = previousDate.getUTCMonth()
const daysOfWeek = Array.from(Array(7)).map((_, i) =>
format('EEE', add({ days: i }, startOfWeek(new Date())))
)
const months = Array.from(Array(12)).map((_, i) =>
format('LLL', add({ months: i }, startOfYear(new Date())))
)
return {
previous:
currentDateMonth !== previousDateMonth
? months[previousDateMonth]
: `${daysOfWeek[previousDateWeekday]} ${previousDateDay}`,
current:
currentDateMonth !== previousDateMonth
? months[currentDateMonth]
: `${daysOfWeek[currentDateWeekday]} ${currentDateDay}`
}
}, [])
const buildTicks = useCallback(
domain => {
const points = []
const roundDate = d => {
const step = dataPoints[period.code].step
return new Date(Math.ceil(d.valueOf() / step) * step)
}
for (let i = 0; i <= dataPoints[period.code].freq; i++) {
const stepDate = new Date(NOW - i * dataPoints[period.code].step)
if (roundDate(stepDate) > domain[1]) continue
if (stepDate < domain[0]) continue
points.push(roundDate(stepDate))
}
return points
},
[NOW, dataPoints, period.code]
)
const buildAreas = useCallback(
domain => {
const points = []
points.push(domain[1])
const roundDate = d => {
const step = dataPoints[period.code].step
return new Date(Math.ceil(d.valueOf() / step) * step)
}
for (let i = 0; i <= dataPoints[period.code].freq; i++) {
const stepDate = new Date(NOW - i * dataPoints[period.code].step)
if (roundDate(stepDate) > new Date(domain[1])) continue
if (stepDate < new Date(domain[0])) continue
points.push(roundDate(stepDate))
}
points.push(domain[0])
return points
},
[NOW, dataPoints, period.code]
)
const x = d3
.scaleUtc()
.domain(periodDomains[period.code])
.range([GRAPH_MARGIN.left, GRAPH_WIDTH - GRAPH_MARGIN.right])
const y = d3
.scaleLinear()
.domain([
0,
(d3.max(data, d => new BigNumber(d.fiat).toNumber()) ?? 1000) * 1.03
])
.nice()
.range([GRAPH_HEIGHT - GRAPH_MARGIN.bottom, GRAPH_MARGIN.top])
const getAreaInterval = (breakpoints, limits) => {
const fullBreakpoints = [
limits[1],
...R.filter(it => it > limits[0] && it < limits[1], breakpoints),
limits[0]
]
const intervals = []
for (let i = 0; i < fullBreakpoints.length - 1; i++) {
intervals.push([fullBreakpoints[i], fullBreakpoints[i + 1]])
}
return intervals
}
const getAreaIntervalByX = (intervals, xValue) => {
return R.find(it => xValue <= it[0] && xValue >= it[1], intervals) ?? [0, 0]
}
const getDateIntervalByX = (areas, intervals, xValue) => {
const flattenIntervals = R.uniq(R.flatten(intervals))
// flattenIntervals and areas should have the same number of elements
for (let i = intervals.length - 1; i >= 0; i--) {
if (xValue < flattenIntervals[i]) {
return [areas[i], areas[i + 1]]
}
}
}
const buildXAxis = useCallback(
g =>
g
.attr(
'transform',
`translate(0, ${GRAPH_HEIGHT - GRAPH_MARGIN.bottom})`
)
.call(
d3
.axisBottom(x)
.ticks(dataPoints[period.code].tick)
.tickFormat(d => {
return d3.timeFormat(dataPoints[period.code].labelFormat)(
d.getTime() + d.getTimezoneOffset() * MINUTE
)
})
)
.call(g => g.select('.domain').remove())
.call(g =>
g
.append('line')
.attr('x1', GRAPH_MARGIN.left)
.attr('y1', -GRAPH_HEIGHT + GRAPH_MARGIN.top + GRAPH_MARGIN.bottom)
.attr('x2', GRAPH_MARGIN.left)
.attr('stroke', primaryColor)
.attr('stroke-width', 1)
),
[GRAPH_MARGIN, dataPoints, period.code, x]
)
const buildYAxis = useCallback(
g =>
g
.attr('transform', `translate(${GRAPH_MARGIN.left}, 0)`)
.call(d3.axisLeft(y).ticks(GRAPH_HEIGHT / 100))
.call(g => g.select('.domain').remove())
.call(g =>
g
.selectAll('.tick line')
.filter(d => d === 0)
.clone()
.attr('x2', GRAPH_WIDTH - GRAPH_MARGIN.right - GRAPH_MARGIN.left)
.attr('stroke-width', 1)
.attr('stroke', primaryColor)
),
[GRAPH_MARGIN, y]
)
const buildGrid = useCallback(
g => {
g.attr('stroke', subheaderDarkColor)
.attr('fill', subheaderDarkColor)
// Vertical lines
.call(g =>
g
.append('g')
.selectAll('line')
.data(buildTicks(x.domain()))
.join('line')
.attr('x1', d => 0.5 + x(d))
.attr('x2', d => 0.5 + x(d))
.attr('y1', GRAPH_MARGIN.top)
.attr('y2', GRAPH_HEIGHT - GRAPH_MARGIN.bottom)
)
// Horizontal lines
.call(g =>
g
.append('g')
.selectAll('line')
.data(
d3
.axisLeft(y)
.scale()
.ticks(GRAPH_HEIGHT / 100)
)
.join('line')
.attr('y1', d => 0.5 + y(d))
.attr('y2', d => 0.5 + y(d))
.attr('x1', GRAPH_MARGIN.left)
.attr('x2', GRAPH_WIDTH - GRAPH_MARGIN.right)
)
// Vertical transparent rectangles for events
.call(g =>
g
.append('g')
.selectAll('line')
.data(buildAreas(x.domain()))
.join('rect')
.attr('x', d => x(d))
.attr('y', GRAPH_MARGIN.top)
.attr('width', d => {
const xValue = Math.round(x(d) * 100) / 100
const intervals = getAreaInterval(
buildAreas(x.domain()).map(it => Math.round(x(it) * 100) / 100),
x.range()
)
const interval = getAreaIntervalByX(intervals, xValue)
return Math.round((interval[0] - interval[1]) * 100) / 100
})
.attr(
'height',
GRAPH_HEIGHT - GRAPH_MARGIN.bottom - GRAPH_MARGIN.top
)
.attr('stroke', 'transparent')
.attr('fill', 'transparent')
.on('mouseover', d => {
const xValue = Math.round(d.target.x.baseVal.value * 100) / 100
const areas = buildAreas(x.domain())
const intervals = getAreaInterval(
buildAreas(x.domain()).map(it => Math.round(x(it) * 100) / 100),
x.range()
)
const dateInterval = getDateIntervalByX(areas, intervals, xValue)
const filteredData = data.filter(it => {
const created = new Date(it.created)
const tzCreated = created.setTime(created.getTime() + offset)
return (
tzCreated > new Date(dateInterval[1]) &&
tzCreated <= new Date(dateInterval[0])
)
})
const rectXCoords = {
left: R.clone(d.target.getBoundingClientRect().x),
right: R.clone(
d.target.getBoundingClientRect().x +
d.target.getBoundingClientRect().width
)
}
const xCoord =
d.target.x.baseVal.value < 0.75 * GRAPH_WIDTH
? rectXCoords.right + GRAPH_POPOVER_MARGIN
: rectXCoords.left -
GRAPH_POPOVER_WIDTH -
GRAPH_POPOVER_MARGIN
const yCoord = R.clone(d.target.getBoundingClientRect().y)
setSelectionDateInterval(dateInterval)
setSelectionData(filteredData)
setSelectionCoords({
x: Math.round(xCoord),
y: Math.round(yCoord)
})
d3.select(d.target).attr('fill', subheaderColor)
})
.on('mouseleave', d => {
d3.select(d.target).attr('fill', 'transparent')
setSelectionDateInterval(null)
setSelectionData(null)
setSelectionCoords(null)
})
)
// Thick vertical lines
.call(g =>
g
.append('g')
.selectAll('line')
.data(
buildTicks(x.domain()).filter(x => {
if (period.code === 'day') return x.getUTCHours() === 0
return x.getUTCDate() === 1
})
)
.join('line')
.attr('class', 'dateSeparator')
.attr('x1', d => 0.5 + x(d))
.attr('x2', d => 0.5 + x(d))
.attr('y1', GRAPH_MARGIN.top - 50)
.attr('y2', GRAPH_HEIGHT - GRAPH_MARGIN.bottom)
.attr('stroke-width', 5)
.join('text')
)
// Left side breakpoint label
.call(g => {
const separator = d3
?.select('.dateSeparator')
?.node()
?.getBBox()
if (!separator) return
const breakpoint = buildTicks(x.domain()).filter(x => {
if (period.code === 'day') return x.getUTCHours() === 0
return x.getUTCDate() === 1
})
const labels = getPastAndCurrentDayLabels(breakpoint)
return g
.append('text')
.attr('x', separator.x - 10)
.attr('y', separator.y + 33)
.attr('text-anchor', 'end')
.attr('dy', '.25em')
.text(labels.previous)
})
// Right side breakpoint label
.call(g => {
const separator = d3
?.select('.dateSeparator')
?.node()
?.getBBox()
if (!separator) return
const breakpoint = buildTicks(x.domain()).filter(x => {
if (period.code === 'day') return x.getUTCHours() === 0
return x.getUTCDate() === 1
})
const labels = getPastAndCurrentDayLabels(breakpoint)
return g
.append('text')
.attr('x', separator.x + 10)
.attr('y', separator.y + 33)
.attr('text-anchor', 'start')
.attr('dy', '.25em')
.text(labels.current)
})
},
[
GRAPH_MARGIN,
buildTicks,
getPastAndCurrentDayLabels,
x,
y,
period,
buildAreas,
data,
offset,
setSelectionCoords,
setSelectionData,
setSelectionDateInterval
]
)
const formatTicksText = useCallback(
() =>
d3
.selectAll('.tick text')
.style('stroke', fontColor)
.style('fill', fontColor)
.style('stroke-width', 0.5)
.style('font-family', fontSecondary),
[]
)
const formatText = useCallback(
() =>
d3
.selectAll('text')
.style('stroke', offColor)
.style('fill', offColor)
.style('stroke-width', 0.5)
.style('font-family', fontSecondary),
[]
)
const formatTicks = useCallback(() => {
d3.selectAll('.tick line')
.style('stroke', primaryColor)
.style('fill', primaryColor)
}, [])
const buildAvg = useCallback(
g => {
g.attr('stroke', primaryColor)
.attr('stroke-width', 3)
.attr('stroke-dasharray', '10, 5')
.call(g =>
g
.append('line')
.attr(
'y1',
0.5 + y(d3.mean(data, d => new BigNumber(d.fiat).toNumber()) ?? 0)
)
.attr(
'y2',
0.5 + y(d3.mean(data, d => new BigNumber(d.fiat).toNumber()) ?? 0)
)
.attr('x1', GRAPH_MARGIN.left)
.attr('x2', GRAPH_WIDTH - GRAPH_MARGIN.right)
)
},
[GRAPH_MARGIN, y, data]
)
const drawData = useCallback(
g => {
g.selectAll('circle')
.data(data)
.join('circle')
.attr('cx', d => {
const created = new Date(d.created)
return x(created.setTime(created.getTime() + offset))
})
.attr('cy', d => y(new BigNumber(d.fiat).toNumber()))
.attr('fill', d => (d.txClass === 'cashIn' ? java : neon))
.attr('r', 3.5)
},
[data, offset, x, y]
)
const drawChart = useCallback(() => {
const svg = d3
.select(ref.current)
.attr('viewBox', [0, 0, GRAPH_WIDTH, GRAPH_HEIGHT])
svg.append('g').call(buildGrid)
svg.append('g').call(buildAvg)
svg.append('g').call(buildXAxis)
svg.append('g').call(buildYAxis)
svg.append('g').call(formatTicksText)
svg.append('g').call(formatText)
svg.append('g').call(formatTicks)
svg.append('g').call(drawData)
return svg.node()
}, [
buildAvg,
buildGrid,
buildXAxis,
buildYAxis,
drawData,
formatText,
formatTicks,
formatTicksText
])
  // Clear and rebuild the SVG from scratch whenever any chart input changes.
  useEffect(() => {
    d3.select(ref.current)
      .selectAll('*')
      .remove()
    drawChart()
  }, [drawChart])

  return <svg ref={ref} />
}
// NOTE(review): this comparator only checks `period` and `selectedMachine`
// (the latter is not among Graph's props), so changes to `data` or `timezone`
// alone will not re-render the chart — confirm the parent remounts or changes
// `period` whenever those props change.
export default memo(
  Graph,
  (prev, next) =>
    R.equals(prev.period, next.period) &&
    R.equals(prev.selectedMachine, next.selectedMachine)
)
|
tobico/seaturtle
|
src/util/string-to-proc.js
|
// Converts a string into a function that returns the named attribute of its
// first parameter. If that attribute is itself a function, it is invoked on
// the parameter with any additional arguments given to stringToProc and the
// result is returned. Yields null when the parameter is falsy or has no such
// attribute.
export const stringToProc = (string, passArgs) => {
  const args = passArgs == null ? [] : passArgs
  return o => {
    if (!o || o[string] === undefined) {
      return null
    }
    const attr = o[string]
    return attr && attr.apply ? attr.apply(o, args) : attr
  }
}
|
oslab-swrc/juxta
|
analyzer/dbg.py
|
<reponame>oslab-swrc/juxta<gh_stars>10-100
# SPDX-License-Identifier: MIT
import os
import sys
#
# usage:
#
# import dbg
#
# dbg.test("#B<red text#>")
# dbg.info("this is info")
# dbg.error("this is #R<error#>")
#
# dbg.save_to_file(path, color=T/F)
# dbg.quiet(tags)
# dbg.set_header()
#
# <<func>> : function name
# <<line>> : line number
# <<file>> : file name
# <<tag>> : tag name
# #B< : blue
# #R< : red
# #G< : green
# #Y< : yellow
# #C< : cyan
# #> : end mark
#
# Header template evaluated (via eval in _formatting) for every log line;
# the <<tag>>/<<func>>/<<line>>/<<file>> placeholders are substituted first.
_header = "'[#R<%-10s#>|#B<%-20s#>] ' % (<<tag>>,<<func>>)"
# reference color: ANSI color indices 0-7, rendered as "\033[3<index>m"
# by _coloring().
_BLACK, _RED, _GREEN, _YELLOW, _BLUE, _MAGENTA, _CYAN, _WHITE = range(8)
def _currentframe() :
    """Return the caller's stack frame.

    Raising and catching an exception fills sys.exc_info() with a traceback
    whose frame is this function; .f_back is then the caller's frame.
    """
    try :
        raise Exception
    except :
        return sys.exc_info()[2].tb_frame.f_back
def _formatting(msg, tag, rv):
    """Substitute tag/function/line/file into the header template, eval the
    resulting expression, and colorize it.

    rv is a (filename, lineno, funcname) triple as produced by _dbg().
    """
    filled = msg
    for marker, value in (("<<tag>>", str(tag)),
                          ("<<func>>", rv[2]),
                          ("<<line>>", rv[1]),
                          ("<<file>>", rv[0])):
        filled = filled.replace(marker, repr(value))
    return _coloring(eval(filled))
def _coloring(msg):
    """Expand the #X< ... #> color markers into ANSI escape sequences."""
    replacements = (
        ("#B<", "\033[3%dm" % _BLUE),
        ("#G<", "\033[3%dm" % _GREEN),
        ("#R<", "\033[3%dm" % _RED),
        ("#Y<", "\033[3%dm" % _YELLOW),
        ("#C<", "\033[3%dm" % _CYAN),
        ("#>", "\033[m"),
    )
    out = msg
    for marker, escape in replacements:
        out = out.replace(marker, escape)
    return out
# when ignored, don't stringify the objects
def _dbg(tag, fmt, *msglist):
    """Format one tagged log line and write it (header + colors) to stderr."""
    f = _currentframe()
    # use frame of caller's caller, as this is called via wrapper
    # static methods of the dbg class
    if f is not None:
        f = f.f_back.f_back
    # look up frames
    rv = "(unknown file)", 0, "(unknown function)"
    while hasattr(f, "f_code"):
        co = f.f_code
        filename = os.path.normcase(co.co_filename)
        # skip frames belonging to this module (or exec'd strings)
        if filename in [__file__, "<string>"]:
            f = f.f_back
            continue
        rv = (filename, f.f_lineno, co.co_name)
        break
    # convert to str
    msg = fmt % msglist
    sys.stderr.write(("%s %s\n" % (_formatting(_header, tag, rv),
                                   _coloring(msg))))
# Tags suppressed via quiet(); None means nothing is suppressed.
# (The original preceded this with a module-level `global settings`
# statement, which is a no-op outside a function body; it was dropped.)
settings = None
def _stop():
    """Drop into pdb in the caller's frame (exposed as dbg.stop)."""
    import pdb
    pdb.Pdb().set_trace(sys._getframe().f_back)
def _quiet(tags):
    """Suppress every tag contained in `tags` (exposed as dbg.quiet)."""
    global settings
    settings = tags
# attribute factories
class Wrapper(object):
    """Module proxy: attribute access yields a tag-named logging function,
    or the stop/quiet helpers."""

    def __init__(self, wrapped):
        self.wrapped = wrapped

    def __getattr__(self, tag):
        global settings
        if tag == "stop":
            return _stop
        if tag == "quiet":
            return _quiet
        if settings is not None and tag in settings:
            return lambda *fmt: None  # this tag has been silenced
        return lambda *fmt: _dbg(tag, *fmt)
sys.modules[__name__] = Wrapper(sys.modules[__name__])
|
alinous-core/codable-cash
|
src_smartcontract/lang/sc_declare_types/CharType.cpp
|
/*
* CharType.cpp
*
* Created on: 2019/01/19
* Author: iizuka
*/
#include "lang/sc_declare_types/CharType.h"
#include "base/UnicodeString.h"
namespace alinous {

// Type-name literal shared by all CharType instances (see toString()).
const UnicodeString CharType::TYPE_NAME(L"char");

// Tags this node with the element code for the primitive char type.
CharType::CharType() : AbstractPrimitiveType(CodeElement::TYPE_CHAR) {

}

CharType::~CharType() {

}

// Serialized size in bytes: the uint16_t element-type tag written by
// toBinary() plus whatever the base type contributes.
int CharType::binarySize() const {
	int total = sizeof(uint16_t);
	total += AbstractType::binarySize();

	return total;
}

// Writes the element-type tag followed by the base-type payload; must stay
// consistent with binarySize() and symmetric with deserialization.
void CharType::toBinary(ByteBuffer* out) {
	out->putShort(CodeElement::TYPE_CHAR);
	AbstractType::toBinary(out);
}

// Reads only the base-type payload; the element-type tag is presumably
// consumed by the caller before dispatching here — TODO confirm.
void CharType::fromBinary(ByteBuffer* in) {
	AbstractType::fromBinary(in);
}

// Returns a pointer to the static type name; never null.
const UnicodeString* CharType::toString() noexcept {
	return &TYPE_NAME;
}

} /* namespace alinous */
|
inugami-dev64/libdas
|
src/GLTFCompiler.cpp
|
<reponame>inugami-dev64/libdas<filename>src/GLTFCompiler.cpp<gh_stars>0
#define GLTF_COMPILER_CPP
#include <GLTFCompiler.h>
namespace Libdas {
GLTFCompiler::GLTFCompiler(const std::string &_out_file) : DasWriterCore(_out_file) {}
GLTFCompiler::GLTFCompiler(const GLTFRoot &_root, const DasProperties &_props, const std::string &_out_file) {
Compile(_root, _props, _out_file);
}
void GLTFCompiler::_CheckAndSupplementProperties(GLTFRoot &_root, DasProperties &_props) {
if(_props.author == "")
_props.author = _root.asset.generator;
if(_props.copyright == "")
_props.copyright = _root.asset.copyright;
// check if default scene exists
if(_root.load_time_scene)
_props.default_scene = static_cast<uint32_t>(_root.load_time_scene);
}
void GLTFCompiler::_FlagBuffersAccordingToMeshes(const GLTFRoot &_root, std::vector<DasBuffer> &_buffers) {
std::unordered_map<std::string, BufferType> attribute_type_map = {
std::make_pair("POSITION", LIBDAS_BUFFER_TYPE_VERTEX),
std::make_pair("NORMAL", LIBDAS_BUFFER_TYPE_VERTEX_NORMAL),
std::make_pair("TANGENT", LIBDAS_BUFFER_TYPE_VERTEX_TANGENT),
std::make_pair("TEXCOORD_", LIBDAS_BUFFER_TYPE_TEXTURE_MAP),
std::make_pair("COLOR_", LIBDAS_BUFFER_TYPE_COLOR),
std::make_pair("JOINTS_", LIBDAS_BUFFER_TYPE_JOINTS),
std::make_pair("WEIGHTS_", LIBDAS_BUFFER_TYPE_WEIGHTS)
};
// for each mesh in root
for(auto it = _root.meshes.begin(); it != _root.meshes.end(); it++) {
// for each primitive in mesh
for(size_t i = 0; i < it->primitives.size(); i++) {
int32_t size = static_cast<int32_t>(_buffers.size());
// lol @ ur bugs with broken GLTF files
if(it->primitives[i].indices != INT32_MAX)
_buffers[it->primitives[i].indices % size].type |= LIBDAS_BUFFER_TYPE_INDICES;
// check into attributes
for(auto map_it = it->primitives[i].attributes.begin(); map_it != it->primitives[i].attributes.end(); it++) {
std::string no_nr = String::RemoveNumbers(map_it->first);
// error check
if(attribute_type_map.find(no_nr) == attribute_type_map.end()) {
std::cerr << "GLTF error: No valid attribute '" << map_it->first << "' available for current implementation" << std::endl;
std::exit(1);
}
}
}
}
}
std::vector<DasBuffer> GLTFCompiler::_CreateBuffers(const GLTFRoot &_root) {
std::vector<DasBuffer> buffers;
m_buffers_size = _root.buffers.size();
m_images_size = _root.images.size();
// append buffers
for(auto it = _root.buffers.begin(); it != _root.buffers.end(); it++) {
DasBuffer buffer;
buffer.type = LIBDAS_BUFFER_TYPE_UNKNOWN;
buffer.data_len = it->byte_length;
buffer.data_ptrs.push_back(std::make_pair<const char*, size_t>(reinterpret_cast<const char*>(_root.resources.at(it->uri).data()), buffer.data_len));
buffers.push_back(buffer);
}
// append images
for(auto it = _root.images.begin(); it != _root.images.end(); it++) {
DasBuffer buffer;
// there are two possibilities:
// 1. the image is defined with its uri
// 2. the image is defined in some buffer view
if(it->uri != "") {
const std::vector<char> &resource = _root.resources.at(it->uri);
buffer.data_len = resource.size();
buffer.data_ptrs.push_back(std::make_pair(resource.data(), resource.size()));
BufferImageTypeResolver resolver;
resolver.FindImageBufferTypeFromResource(resource, it->uri);
buffer.type |= resolver.GetResolvedType();
}
else {
BufferImageTypeResolver resolver;
resolver.ResolveFromBufferView(it->mime_type);
buffer.type |= resolver.GetResolvedType();
}
buffers.push_back(buffer);
}
_FlagBuffersAccordingToMeshes(_root, buffers);
return buffers;
}
std::vector<DasMesh> GLTFCompiler::_CreateMeshes(const GLTFRoot &_root) {
std::vector<DasMesh> meshes;
return meshes;
}
std::vector<DasNode> GLTFCompiler::_CreateNodes(const GLTFRoot &_root) {
std::vector<DasNode> nodes;
return nodes;
}
std::vector<DasScene> GLTFCompiler::_CreateScenes(const GLTFRoot &_root) {
std::vector<DasScene> scenes;
return scenes;
}
std::vector<DasSkeleton> GLTFCompiler::_CreateSkeletons(const GLTFRoot &_root) {
std::vector<DasSkeleton> skeletons;
return skeletons;
}
std::vector<DasSkeletonJoint> GLTFCompiler::_CreateSkeletonJoints(const GLTFRoot &_root) {
std::vector<DasSkeletonJoint> joints;
return joints;
}
std::vector<DasAnimation> GLTFCompiler::_CreateAnimations(const GLTFRoot &_root) {
std::vector<DasAnimation> animations;
return animations;
}
void GLTFCompiler::Compile(const GLTFRoot &_root, const DasProperties &_props, const std::string &_out_file) {
// check if new file should be opened
if(_out_file != "")
NewFile(_out_file);
_CheckAndSupplementProperties(const_cast<GLTFRoot&>(_root), const_cast<DasProperties&>(_props));
InitialiseFile(_props);
// write buffers to file
std::vector<DasBuffer> buffers = _CreateBuffers(_root);
for(auto it = buffers.begin(); it != buffers.end(); it++)
WriteBuffer(*it);
// write meshes to the file
std::vector<DasMesh> meshes = _CreateMeshes(_root);
for(auto it = meshes.begin(); it != meshes.end(); it++)
WriteMesh(*it);
// write scene nodes to the file
std::vector<DasNode> nodes = _CreateNodes(_root);
for(auto it = nodes.begin(); it != nodes.end(); it++)
WriteNode(*it);
// write scenes to the file
std::vector<DasScene> scenes = _CreateScenes(_root);
for(auto it = scenes.begin(); it != scenes.end(); it++)
WriteScene(*it);
// write skeletons to the file
std::vector<DasSkeleton> skeletons = _CreateSkeletons(_root);
for(auto it = skeletons.begin(); it != skeletons.end(); it++)
WriteSkeleton(*it);
// write skeleton joints to the file
std::vector<DasSkeletonJoint> joints = _CreateSkeletonJoints(_root);
for(auto it = joints.begin(); it != joints.end(); it++)
WriteSkeletonJoint(*it);
// write animations to file
std::vector<DasAnimation> animations = _CreateAnimations(_root);
for(auto it = animations.begin(); it != animations.end(); it++)
WriteAnimation(*it);
}
}
|
ManonGros/colplus-backend
|
colplus-api/src/test/java/org/col/common/kryo/ApiKryoFactoryTest.java
|
<gh_stars>0
package org.col.common.kryo;
import java.io.ByteArrayOutputStream;
import java.util.List;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.google.common.collect.Lists;
import org.col.api.TestEntityGenerator;
import org.col.api.model.*;
import org.col.api.vocab.Issue;
import org.gbif.dwc.terms.*;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
 * Round-trip serialization tests for the Kryo instance produced by
 * ApiKryoFactory: each object must deserialize equal to itself.
 */
public class ApiKryoFactoryTest {
  Kryo kryo = new ApiKryoFactory().create();

  @Test
  public void testName() throws Exception {
    Name n = TestEntityGenerator.newName("1234567");
    assertSerde(n);
  }

  @Test
  public void testReference() throws Exception {
    Reference r = new Reference();
    r.setId("1234");
    r.setYear(1984);
    r.setDatasetKey(77);
    r.setCsl(TestEntityGenerator.createCsl());
    assertSerde(r);
  }

  @Test
  public void testVerbatim() throws Exception {
    // Mix of term vocabularies, including an UnknownTerm, to exercise the
    // registered term serializers.
    List<Term> terms = Lists.newArrayList(
        DwcTerm.scientificName, DwcTerm.associatedOrganisms, DwcTerm.taxonID,
        DcTerm.title,
        GbifTerm.canonicalName,
        IucnTerm.threatStatus,
        AcefTerm.Family,
        UnknownTerm.build("http://gbif.org/abcdefg")
    );
    assertSerde(terms);

    // A verbatim record carrying every known issue.
    VerbatimRecord rec = TestEntityGenerator.createVerbatim();
    for (Issue issue : Issue.values()) {
      rec.addIssue(issue);
    }
    assertSerde(rec);
  }

  @Test
  public void testEmptyModels() throws Exception {
    // Freshly constructed (all-default) model objects must round-trip too.
    assertSerde(new Taxon());
    assertSerde(new Name());
    assertSerde(new Reference());
    assertSerde(new Dataset());
    assertSerde(new DatasetImport());
  }

  /** Serializes obj with kryo and asserts the deserialized copy equals it. */
  private void assertSerde(Object obj) {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream(128);
    Output output = new Output(buffer);
    kryo.writeObject(output, obj);
    output.close();
    byte[] bytes = buffer.toByteArray();
    final Input input = new Input(bytes);
    Object obj2 = kryo.readObject(input, obj.getClass());
    assertEquals(obj, obj2);
  }
}
|
open-o/sdno-brs
|
mss-cbb/org.openo.sdno.mss.dao/src/test/java/org/openo/sdno/mss/dao/io/SqlSessionProxyTest.java
|
<reponame>open-o/sdno-brs
/*
* Copyright 2016 Huawei Technologies Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openo.sdno.mss.dao.io;
import static org.junit.Assert.assertEquals;
import java.sql.BatchUpdateException;
import java.util.ArrayList;
import java.util.List;
import org.apache.ibatis.exceptions.PersistenceException;
import org.apache.ibatis.executor.BatchExecutorException;
import org.apache.ibatis.executor.BatchResult;
import org.apache.ibatis.session.defaults.DefaultSqlSession;
import org.junit.Test;
import mockit.Mock;
import mockit.MockUp;
/**
* SqlSessionProxy test class.<br>
*
* @author
* @version SDNO 0.5 July 26, 2016
*/
public class SqlSessionProxyTest {

    // Shared exception fixtures; the mocked commit() implementations below
    // wire different causes into `ex` and each test then asserts on getCause().
    private PersistenceException ex = new PersistenceException();

    private Throwable th = new Throwable();

    // BatchExecutorException whose successful-batch list is null — exercises
    // the "no partial results" branch of the commit error handling.
    private BatchExecutorException batchException =
        new BatchExecutorException("message", new BatchUpdateException(), null, null);

    // Happy path: insert() is delegated to the wrapped session and returns its value.
    @Test
    public void testInsert() {
        new MockUp<DefaultSqlSession>() {
            @Mock
            public int insert(String statement) {
                return 99;
            }
            @Mock
            public void close() {
            }
        };
        SqlSessionProxy sqlProxy = new SqlSessionProxy(5, new DefaultSqlSession(null, null));
        sqlProxy.tryTheBestCommit();
        // the proxy must pass the call through to the (mocked) session
        assertEquals(99, sqlProxy.insert("statement"));
        sqlProxy.close();
    }

    // commit() throws with a plain Throwable cause: tryTheBestCommit() must not
    // swallow or replace the original cause chain.
    @Test
    public void testCommitException() {
        new MockUp<DefaultSqlSession>() {
            @Mock
            public int insert(String statement) {
                return 99;
            }
            @Mock
            public void close() {
            }
            @Mock
            public void clearCache() {
            }
            @Mock
            public void commit() {
                // attach the plain Throwable as cause, then fail the commit
                ex.initCause(th);
                throw ex;
            }
        };
        DefaultSqlSession session = new DefaultSqlSession(null, null);
        SqlSessionProxy sqlProxy = new SqlSessionProxy(5, session);
        sqlProxy.insert("statement");
        sqlProxy.tryTheBestCommit();
        sqlProxy.close();
        assertEquals(th, ex.getCause());
    }

    // commit() fails with a BatchExecutorException carrying no result list.
    @Test
    public void testCommitExceptionListNull() {
        new MockUp<DefaultSqlSession>() {
            @Mock
            public int insert(String statement) {
                return 99;
            }
            @Mock
            public void close() {
            }
            @Mock
            public void clearCache() {
            }
            @Mock
            public void commit() {
                ex.initCause(batchException);
                throw ex;
            }
        };
        DefaultSqlSession session = new DefaultSqlSession(null, null);
        SqlSessionProxy sqlProxy = new SqlSessionProxy(5, session);
        sqlProxy.insert("statement");
        sqlProxy.tryTheBestCommit();
        sqlProxy.close();
        assertEquals(batchException, ex.getCause());
    }

    // commit() fails with a BatchExecutorException that DOES carry partial
    // batch results; several inserts are queued first so there is work to commit.
    @Test
    public void testCommitExceptionListNotNull() {
        BatchResult batchResult = new BatchResult(null, "sql");
        int updateCounts[] = new int[1];
        batchResult.setUpdateCounts(updateCounts);
        List<BatchResult> resultList = new ArrayList<BatchResult>();
        resultList.add(batchResult);
        BatchUpdateException updateException = new BatchUpdateException("reason", "SQLState", 1, updateCounts);
        final BatchExecutorException exception =
            new BatchExecutorException("message", updateException, resultList, batchResult);
        new MockUp<DefaultSqlSession>() {
            @Mock
            public int insert(String statement) {
                return 99;
            }
            @Mock
            public void close() {
            }
            @Mock
            public void clearCache() {
            }
            @Mock
            public void commit() {
                ex.initCause(exception);
                throw ex;
            }
        };
        DefaultSqlSession session = new DefaultSqlSession(null, null);
        SqlSessionProxy sqlProxy = new SqlSessionProxy(5, session);
        sqlProxy.insert("statement1");
        sqlProxy.insert("statement2");
        sqlProxy.insert("statement3");
        sqlProxy.insert("statement4");
        sqlProxy.tryTheBestCommit();
        sqlProxy.close();
        assertEquals(exception, ex.getCause());
    }
}
|
35niavlys/teeworlds-fng2-mod
|
src/game/client/components/maplayers.h
|
<gh_stars>10-100
/* (c) <NAME>. See licence.txt in the root of the distribution for more information. */
/* If you are missing that file, acquire a complete release at teeworlds.com. */
#ifndef GAME_CLIENT_COMPONENTS_MAPLAYERS_H
#define GAME_CLIENT_COMPONENTS_MAPLAYERS_H
#include <game/client/component.h>
// Client component that renders the layers of the current map.
// Two instances are used, constructed with TYPE_BACKGROUND and
// TYPE_FOREGROUND respectively, so map layers can be drawn both before and
// after the other game components in the render order.
class CMapLayers : public CComponent
{
	CLayers *m_pLayers;	// todo refactor: maybe remove it and access it through client*
	int m_Type;          // TYPE_BACKGROUND or TYPE_FOREGROUND
	int m_CurrentLocalTick;
	int m_LastLocalTick;
	bool m_EnvelopeUpdate;  // set via EnvelopeUpdate() to force envelope re-evaluation
	// Sets up the screen mapping for rendering the given map group.
	void MapScreenToGroup(float CenterX, float CenterY, CMapItemGroup *pGroup);
	// Callback used when evaluating map envelopes (color/position animation).
	static void EnvelopeEval(float TimeOffset, int Env, float *pChannels, void *pUser);
public:
	enum
	{
		TYPE_BACKGROUND=0,
		TYPE_FOREGROUND,
	};
	// Type selects which half of the layer stack this instance renders.
	CMapLayers(int Type);
	virtual void OnInit();
	virtual void OnRender();
	// Requests that envelopes be re-evaluated on the next render.
	void EnvelopeUpdate();
};
#endif
|
ajayesivan/react-native-remix-icon
|
src/icons/Folder5Fill.js
|
import * as React from "react";
import Svg, { Path } from "react-native-svg";
// Remix Icon "folder-5" (fill variant) as a react-native-svg component.
// Appears to be SVGR-generated — prefer regenerating from the source SVG
// over hand-editing the path data. All props (width, height, fill, ...)
// are forwarded to the root <Svg> element.
function SvgFolder5Fill(props) {
  return (
    <Svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" {...props}>
      {/* invisible 24x24 bounding box keeps the viewBox stable */}
      <Path fill="none" d="M0 0h24v24H0z" />
      <Path d="M13.414 5H20a1 1 0 011 1v1H3V4a1 1 0 011-1h7.414l2 2zM3.087 9h17.826a1 1 0 01.997 1.083l-.834 10a1 1 0 01-.996.917H3.92a1 1 0 01-.996-.917l-.834-10A1 1 0 013.087 9z" />
    </Svg>
  );
}
export default SvgFolder5Fill;
|
sufyan739/AlfaSdk_Android
|
AlfaSdk/src/main/java/com/example/alfasdk/Models/LinksModel/Response.java
|
package com.example.alfasdk.Models.LinksModel;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.util.ArrayList;
import java.util.List;
/**
 * Gson model for the "links" section of the LinksModel API response.
 */
public class Response {

    /** Message type reported by the backend. */
    @SerializedName("MSGTYPE")
    @Expose
    private String MSGTYPE;

    /** Links contained in the response; initialized so it is never null. */
    @SerializedName("links")
    @Expose
    private List<Link> links = new ArrayList<>();

    /**
     * No args constructor for use in serialization
     */
    public Response() {
    }

    /**
     * Creates a fully populated response.
     *
     * @param mSGTYPE the message type
     * @param links   the links contained in the response
     */
    public Response(String mSGTYPE, List<Link> links) {
        this.MSGTYPE = mSGTYPE;
        this.links = links;
    }

    /** @return the message type */
    public String getMSGTYPE() {
        return MSGTYPE;
    }

    /** @param MSGTYPE the message type to set */
    public void setMSGTYPE(String MSGTYPE) {
        this.MSGTYPE = MSGTYPE;
    }

    /** @return the links */
    public List<Link> getLinks() {
        return links;
    }

    /** @param links the links to set */
    public void setLinks(List<Link> links) {
        this.links = links;
    }
}
|
thehyve/hypercube-api-server
|
src/main/java/nl/thehyve/hypercubeapi/patientset/QueryStatusTypeEntity.java
|
<reponame>thehyve/hypercube-api-server<gh_stars>0
package nl.thehyve.hypercubeapi.patientset;
import lombok.*;
import javax.persistence.*;
/**
 * JPA entity mapped to i2b2demodata.qt_query_status_type, the lookup table
 * enumerating the possible statuses of an i2b2 query.
 * Getters/setters, builder, constructors and id-based equality are
 * generated by Lombok.
 */
@Entity
@Table(schema = "i2b2demodata", name = "qt_query_status_type")
@Data @Builder @NoArgsConstructor @AllArgsConstructor @EqualsAndHashCode(of = "id")
public class QueryStatusTypeEntity {

    // surrogate primary key (status_type_id)
    @Id
    @GeneratedValue
    @Column(name = "status_type_id")
    private Long id;

    // short status name (max 100 characters)
    @Column(name = "name", length = 100)
    private String name;

    // human-readable description (max 200 characters)
    @Column(name = "description", length = 200)
    private String description;
}
|
hhoover/kuma
|
pkg/xds/envoy/listeners/v3/http_access_log_configurer.go
|
package v3
import (
envoy_listener "github.com/envoyproxy/go-control-plane/envoy/config/listener/v3"
envoy_hcm "github.com/envoyproxy/go-control-plane/envoy/extensions/filters/network/http_connection_manager/v3"
)
// defaultHttpAccessLogFormat is the Envoy access-log format string used when
// the mesh logging backend does not define its own format.
const defaultHttpAccessLogFormat = `[%START_TIME%] %KUMA_MESH% "%REQ(:METHOD)% %REQ(X-ENVOY-ORIGINAL-PATH?:PATH)% %PROTOCOL%" %RESPONSE_CODE% %RESPONSE_FLAGS% %BYTES_RECEIVED% %BYTES_SENT% %DURATION% %RESP(X-ENVOY-UPSTREAM-SERVICE-TIME)% "%REQ(X-FORWARDED-FOR)%" "%REQ(USER-AGENT)%" "%REQ(X-B3-TRACEID?X-DATADOG-TRACEID)%" "%REQ(X-REQUEST-ID)%" "%REQ(:AUTHORITY)%" "%KUMA_SOURCE_SERVICE%" "%KUMA_DESTINATION_SERVICE%" "%KUMA_SOURCE_ADDRESS_WITHOUT_PORT%" "%UPSTREAM_HOST%"`

// HttpAccessLogConfigurer adds HTTP access logging to a listener's
// HttpConnectionManager, using the embedded AccessLogConfigurer settings.
type HttpAccessLogConfigurer struct {
	AccessLogConfigurer
}
// Configure converts the configured logging backend into an Envoy access-log
// entry and appends it to the HttpConnectionManager of the given filter chain.
func (c *HttpAccessLogConfigurer) Configure(filterChain *envoy_listener.FilterChain) error {
	cfg := c.AccessLogConfigurer
	logEntry, err := convertLoggingBackend(cfg.Mesh, cfg.TrafficDirection, cfg.SourceService, cfg.DestinationService, cfg.Backend, cfg.Proxy, defaultHttpAccessLogFormat)
	if err != nil {
		return err
	}
	return UpdateHTTPConnectionManager(filterChain, func(hcm *envoy_hcm.HttpConnectionManager) error {
		hcm.AccessLog = append(hcm.AccessLog, logEntry)
		return nil
	})
}
|
ChallenAi/algorithm
|
lc289/solution.go
|
package solution
func gameOfLife(board [][]int) {
for i := 0; i < len(board); i++ {
for j := 0; j < len(board[i]); j++ {
temp := cntLives(board, i, j)
if i == 1 && j == 0 {
}
if board[i][j] == 0 && temp == 3 {
board[i][j] = -1
continue
}
if board[i][j] == 1 && (temp < 2 || temp > 3) {
board[i][j] = 2
}
}
}
for i := 0; i < len(board); i++ {
for j := 0; j < len(board[i]); j++ {
if board[i][j] == -1 {
board[i][j] = 1
} else if board[i][j] == 2 {
board[i][j] = 0
}
}
}
}
func cntLives(board [][]int, i, j int) int {
rsu := 0
rsu += getPosInBoard(board, i-1, j-1)
rsu += getPosInBoard(board, i-1, j+1)
rsu += getPosInBoard(board, i-1, j)
rsu += getPosInBoard(board, i, j-1)
rsu += getPosInBoard(board, i, j+1)
rsu += getPosInBoard(board, i+1, j-1)
rsu += getPosInBoard(board, i+1, j+1)
rsu += getPosInBoard(board, i+1, j)
return rsu
}
func getPosInBoard(board [][]int, i, j int) int {
if i >= 0 && i < len(board) && j < len(board[i]) && j >= 0 {
if board[i][j] == 1 || board[i][j] == 2 {
return 1
}
}
return 0
}
|
aounhaider1/Crud2Go
|
eai-portal-domain-crudportlet/src/test/java/de/unioninvestment/eai/portal/portlet/crud/domain/container/TableQueryEventWrapperTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package de.unioninvestment.eai.portal.portlet.crud.domain.container;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.jdbc.core.RowMapper;
import com.vaadin.data.util.sqlcontainer.ColumnProperty;
import com.vaadin.data.util.sqlcontainer.RowId;
import com.vaadin.data.util.sqlcontainer.RowItem;
import com.vaadin.data.util.sqlcontainer.SQLContainer;
import com.vaadin.data.util.sqlcontainer.TemporaryRowId;
import com.vaadin.data.util.sqlcontainer.query.generator.StatementHelper;
import de.unioninvestment.eai.portal.portlet.crud.domain.database.ConnectionPool;
import de.unioninvestment.eai.portal.portlet.crud.domain.events.DeleteEvent;
import de.unioninvestment.eai.portal.portlet.crud.domain.events.DeleteEventHandler;
import de.unioninvestment.eai.portal.portlet.crud.domain.events.InsertEvent;
import de.unioninvestment.eai.portal.portlet.crud.domain.events.InsertEventHandler;
import de.unioninvestment.eai.portal.portlet.crud.domain.events.UpdateEvent;
import de.unioninvestment.eai.portal.portlet.crud.domain.events.UpdateEventHandler;
import de.unioninvestment.eai.portal.portlet.crud.domain.model.DataContainer;
import de.unioninvestment.eai.portal.portlet.crud.domain.support.AuditLogger;
import de.unioninvestment.eai.portal.portlet.crud.domain.test.commons.TestUser;
import de.unioninvestment.eai.portal.support.vaadin.mvp.EventRouter;
import de.unioninvestment.eai.portal.support.vaadin.table.OracleCrudSQLGenerator;
public class TableQueryEventWrapperTest {
private TableQueryEventWrapper tableQueryEventWrapper = null;
@Mock
private EventRouter<InsertEventHandler, InsertEvent> onInsertEventRouterMock;
@Mock
private EventRouter<UpdateEventHandler, UpdateEvent> onUpdateEventRouterMock;
@Mock
private EventRouter<DeleteEventHandler, DeleteEvent> onDeleteEventRouterMock;
@Mock
private DataContainer containerMock;
private String tableName = "TABLEQUERY_WRAPPER";
@Mock
private ConnectionPool connectionPoolMock;
@Mock
private Connection connectionMock;
@Mock
private DatabaseMetaData metaDataMock;
@Mock
private ResultSet tablesRSMock;
@Mock
private ResultSet pkRSMock;
@Mock
private PreparedStatement statementMock;
@Mock
private AuditLogger auditLoggerMock;
@Mock
private SQLContainer container;
private Collection<ColumnProperty> properties = new ArrayList<ColumnProperty>();
@SuppressWarnings("unchecked")
@Before
public void setUp() throws SQLException {
MockitoAnnotations.initMocks(this);
when(connectionPoolMock.reserveConnection()).thenReturn(connectionMock);
when(connectionMock.getMetaData()).thenReturn(metaDataMock);
when(connectionMock.prepareStatement(anyString())).thenReturn(
statementMock);
when(
connectionPoolMock.querySingleResultWithJdbcTemplate(
any(StatementHelper.class), any(RowMapper.class)))
.thenReturn(4711);
when(metaDataMock.getTables(null, null, tableName.toUpperCase(), null))
.thenReturn(tablesRSMock);
when(tablesRSMock.next()).thenReturn(true);
when(metaDataMock.getPrimaryKeys(null, null, tableName.toUpperCase()))
.thenReturn(pkRSMock);
when(pkRSMock.next()).thenReturn(true, false);
when(pkRSMock.getString("COLUMN_NAME")).thenReturn("ID");
OracleCrudSQLGenerator sqlGenerator = new OracleCrudSQLGenerator();
tableQueryEventWrapper = new TableQueryEventWrapper(containerMock,
tableName, connectionPoolMock, sqlGenerator,
onInsertEventRouterMock, onUpdateEventRouterMock,
onDeleteEventRouterMock, new TestUser("Anonymous"));
sqlGenerator.setPrimaryKeyColumns(tableQueryEventWrapper
.getPrimaryKeyColumns());
tableQueryEventWrapper.setAuditLogger(auditLoggerMock);
}
@Test
public void shouldGetIndexById() {
RowId rowId = new RowId(new Object[] { 1l });
assertThat(tableQueryEventWrapper.getIndexById(rowId), is(4710));
}
@Test
public void shouldStoreRowUpdate() throws SQLException {
RowId id = new RowId(new Object[] { "ID1" });
ColumnProperty cp1 = new ColumnProperty("ID1", true, true, true, false,
new Integer(1), Integer.TYPE);
cp1.setReadOnly(false);
properties.add(cp1);
ColumnProperty cp2 = new ColumnProperty("value1", true, true, true,
false, new Integer(1), Integer.TYPE);
cp2.setVersionColumn(true);
properties.add(cp2);
RowItem row = new RowItem(container, id, properties);
when(statementMock.executeUpdate()).thenReturn(1);
tableQueryEventWrapper.storeRow(row);
verify(auditLoggerMock)
.audit("UPDATE TABLEQUERY_WRAPPER SET \"ID1\" = ? WHERE \"value1\" = ? -> Attribute <ID1 : 1, value1 : 1, >");
}
@Test
public void shouldStoreRowImmediatelyInsert() throws SQLException {
RowId id = new TemporaryRowId(new Object[] { "ID1" });
ColumnProperty cp1 = new ColumnProperty("ID1", true, true, true,
false, new Integer(1), Integer.TYPE);
cp1.setReadOnly(false);
properties.add(cp1);
ColumnProperty cp2 = new ColumnProperty("value1", true, true, true,
false, new Integer(1), Integer.TYPE);
cp2.setVersionColumn(true);
properties.add(cp2);
RowItem row = new RowItem(container, id, properties);
when(
connectionMock.prepareStatement(
"INSERT INTO TABLEQUERY_WRAPPER (\"ID1\") VALUES (?)",
new String[] { "ID" })).thenReturn(statementMock);
when(statementMock.executeUpdate()).thenReturn(1);
when(statementMock.getGeneratedKeys()).thenReturn(tablesRSMock);
tableQueryEventWrapper.storeRowImmediately(row);
verify(auditLoggerMock)
.audit("INSERT INTO TABLEQUERY_WRAPPER (\"ID1\") VALUES (?) -> Attribute <ID1 : 1, value1 : 1, >");
}
@Test
public void shouldStoreRowInsert() throws SQLException {
RowId id = new TemporaryRowId(new Object[] { "ID1" });
ColumnProperty cp1 = new ColumnProperty("ID1", true, true, true,
false, new Integer(1), Integer.TYPE);
cp1.setReadOnly(false);
properties.add(cp1);
ColumnProperty cp2 = new ColumnProperty("value1", true, true, true,
false, new Integer(1), Integer.TYPE);
cp2.setVersionColumn(true);
properties.add(cp2);
RowItem row = new RowItem(container, id, properties);
when(
connectionMock.prepareStatement(
"INSERT INTO TABLEQUERY_WRAPPER (\"ID1\") VALUES (?)",
new String[] { "ID" })).thenReturn(statementMock);
when(statementMock.executeUpdate()).thenReturn(1);
tableQueryEventWrapper.storeRow(row);
verify(auditLoggerMock)
.audit("INSERT INTO TABLEQUERY_WRAPPER (\"ID1\") VALUES (?) -> Attribute <ID1 : 1, value1 : 1, >");
}
@Test
public void shouldRemoveRow() throws SQLException {
RowId id = new TemporaryRowId(new Object[] { "ID" });
ColumnProperty cp1 = new ColumnProperty("ID", false, true, true,
false, new Integer(1), Integer.TYPE);
cp1.setReadOnly(true);
properties.add(cp1);
ColumnProperty cp2 = new ColumnProperty("value1", false, true, true,
false, new Integer(1), Integer.TYPE);
cp2.setVersionColumn(true);
properties.add(cp2);
RowItem row = new RowItem(container, id, properties);
when(statementMock.executeUpdate()).thenReturn(1);
tableQueryEventWrapper.removeRow(row);
verify(auditLoggerMock)
.audit("DELETE FROM TABLEQUERY_WRAPPER WHERE \"ID\" = ? -> Attribute <ID : 1, value1 : 1, >");
}
}
|
mwahba/euler
|
CodeEval/src/net/mwahba/codeeval/easy/MultiplesOfNumber.java
|
<gh_stars>1-10
package net.mwahba.codeeval.easy;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
/**
 * CodeEval "Multiples of a Number": for each input line "limit,multiplicand"
 * print the smallest multiple of the multiplicand that is greater than or
 * equal to the limit.
 */
public class MultiplesOfNumber {

    /**
     * Returns the smallest positive multiple of {@code multiplicand} that is
     * &gt;= {@code limit} (always at least one multiplicand, matching the
     * original incremental loop's behavior).
     *
     * @param limit        lower bound for the result
     * @param multiplicand the base number; assumed positive per the puzzle input
     * @return smallest k * multiplicand with k &gt;= 1 and result &gt;= limit
     */
    static int smallestMultiple(int limit, int multiplicand) {
        // ceil(limit / multiplicand), clamped to at least 1.
        int k = Math.max(1, (limit + multiplicand - 1) / multiplicand);
        return k * multiplicand;
    }

    /**
     * @param args args[0] is the input file path, one "limit,multiplicand" pair per line
     * @throws IOException if the input file cannot be read
     * @throws NumberFormatException if a line does not contain two integers
     */
    public static void main(String[] args) throws IOException {
        File file = new File(args[0]);
        // try-with-resources: the reader is closed even if parsing throws
        // (the original leaked it on any exception before close()).
        try (BufferedReader buffer = new BufferedReader(new FileReader(file))) {
            String line;
            while ((line = buffer.readLine()) != null) {
                String[] lineContents = line.trim().split(",");
                int limit = Integer.parseInt(lineContents[0]);
                int multiplicand = Integer.parseInt(lineContents[1]);
                System.out.println(smallestMultiple(limit, multiplicand));
            }
        }
    }
}
|
XpressAI/frovedis
|
src/frovedis/ml/tree/bitcount.hpp
|
<gh_stars>10-100
#ifndef _BITCOUNT_HPP_
#define _BITCOUNT_HPP_
#include <climits>
#include <cstdint>
namespace frovedis {

// Compile-time popcount: counts set bits one at a time.
// Recursive so it is usable as a C++11-style single-expression constexpr.
template <typename T>
constexpr T CX_POPCOUNT(const T x) {
  return (x == 0) ? 0 : (x & 1) + CX_POPCOUNT(x >> 1);
}

// Replicates the lowest byte across the whole word: 0xAB -> 0xABAB...AB.
// Each recursion ORs the byte into `work` and shifts it up one byte;
// the recursion stops once the byte has been shifted out of the word.
template <typename T>
constexpr T fill(const T byte, const T work = 0) {
  return (byte > 0) ? (
    fill(byte << CHAR_BIT, byte | work)
  ) : (
    work
  );
}

// Donald Knuth's branch-free popcount (sideways addition).
// The masks are built at compile time for the actual width of T.
template <typename T>
inline T popcount(T x) {
  constexpr T __0x55__ = fill<T>(0x55);
  constexpr T __0x33__ = fill<T>(0x33);
  constexpr T __0x0f__ = fill<T>(0x0f);
  constexpr T __0x01__ = fill<T>(0x01);
  constexpr size_t RSHIFT = CHAR_BIT * (sizeof(T) - 1);
  x = x - ((x >> 1) & __0x55__);              // 2-bit sums
  x = (x & __0x33__) + ((x >> 2) & __0x33__); // 4-bit sums
  x = (x + (x >> 4)) & __0x0f__;              // 8-bit sums
  return (x * __0x01__) >> RSHIFT;            // fold bytes into the top byte
}

// Number of trailing zero bits: (~x) & (x - 1) has set bits exactly where
// x has trailing zeros, so its popcount is the answer.
template <typename T>
inline T ntzcount(const T x) {
  return popcount((~x) & (x - 1));
}

// Keeps only the highest set bit (from Hacker's Delight).
// FIX: the original unconditionally executed `x |= (x >> 32)`, which is
// undefined behavior when T is narrower than 64 bits (shift count >= bit
// width). The doubling-shift loop below performs the same or-cascade but
// stops before the shift reaches the width of T, so it is correct for any
// unsigned integer width.
template <typename T>
inline T hibit(T x) {
  for (unsigned int shift = 1; shift < sizeof(T) * CHAR_BIT; shift <<= 1) {
    x |= (x >> shift);
  }
  return x - (x >> 1);
}

} // end namespace frovedis
#endif
|
ju-liu/krylov
|
src/krylov/gmres.py
|
<reponame>ju-liu/krylov<gh_stars>1-10
from typing import Callable, Optional
import numpy as np
import scipy.linalg
from numpy.typing import ArrayLike
from ._helpers import (
Identity,
Info,
LinearOperator,
Product,
aslinearoperator,
get_default_inner,
)
from .arnoldi import ArnoldiHouseholder, ArnoldiMGS
from .givens import givens
def multi_matmul(A, b):
    """Batched matrix-vector product.

    Contracts the second axis of ``A`` against the first axis of ``b``,
    broadcasting over all trailing axes, i.e. ``A @ b`` for many stacked
    ``A`` of shape ``(m, n, ...)`` and ``b`` of shape ``(n, ...)``.
    """
    contraction = "ij...,j...->i..."
    return np.einsum(contraction, A, b)
def multi_solve_triangular(A, B):
    """Solve stacked (upper-)triangular systems column by column.

    Calls ``scipy.linalg.solve_triangular`` once per trailing column of
    ``A``/``B``; an all-zero right-hand side short-circuits to a zero
    solution. A fully vectorized implementation would be preferable here.
    """
    orig_shape = A.shape
    a = A.reshape(orig_shape[0], orig_shape[1], -1)
    rhs = B.reshape(B.shape[0], -1)
    columns = []
    for k in range(a.shape[2]):
        col = rhs[:, k]
        if np.any(col != 0.0):
            columns.append(scipy.linalg.solve_triangular(a[:, :, k], col))
        else:
            # zero RHS => zero solution; skip the solver call
            columns.append(np.zeros(col.shape))
    return np.array(columns).T.reshape([orig_shape[0]] + list(orig_shape[2:]))
def gmres(
    A: LinearOperator,
    b: ArrayLike,
    M: Optional[LinearOperator] = None,
    Ml: Optional[LinearOperator] = None,
    Mr: Optional[LinearOperator] = None,
    inner: Optional[Callable] = None,
    ortho: str = "mgs",
    x0: Optional[ArrayLike] = None,
    tol: float = 1e-5,
    atol: float = 1.0e-15,
    maxiter: Optional[int] = None,
    callback: Optional[Callable] = None,
):
    r"""Preconditioned GMRES method.

    The *preconditioned generalized minimal residual method* can be used to
    solve a system of linear algebraic equations. Let the following linear
    algebraic system be given:

    .. math::
        M M_l A M_r y = M M_l b,

    where :math:`x=M_r y`.
    The preconditioned GMRES method then computes (in exact arithmetics!)
    iterates :math:`x_k \in x_0 + M_r K_k` with
    :math:`K_k:= K_k(M M_l A M_r, r_0)` such that

    .. math::
        \|M M_l(b - A x_k)\|_{M^{-1}} =
        \min_{z \in x_0 + M_r K_k} \|M M_l (b - A z)\|_{M^{-1}}.

    The Arnoldi alorithm is used with the operator
    :math:`M M_l A M_r` and the inner product defined by
    :math:`\langle x,y \rangle_{M^{-1}} = \langle M^{-1}x,y \rangle`.
    The initial vector for Arnoldi is
    :math:`r_0 = M M_l (b - Ax_0)` - note that :math:`M_r` is not used for
    the initial vector.

    Memory consumption is about maxiter+1 vectors for the Arnoldi basis.
    If :math:`M` is used the memory consumption is 2*(maxiter+1).
    If the operator :math:`M_l A M_r` is self-adjoint then consider using
    the MINRES method :py:class:`Minres`.

    Fix over the previous revision: stray debugging ``print()`` calls in the
    iteration loop were removed.
    """

    def _get_xk(y):
        # Map the projected solution y (coordinates in the Krylov basis)
        # back to the full space: x_k = x0 + Mr @ (V @ R^{-1} y).
        if y is None:
            return x0
        k = arnoldi.iter
        if k > 0:
            yy = multi_solve_triangular(R[:k, :k], y)
            # The last entry is always 0, so we could skip it, too
            # yk = sum(c * v for c, v in zip(yy, V[:-1]))
            yk = sum(c * v for c, v in zip(yy, arnoldi.V))
            return x0 + Mr @ yk
        return x0

    def get_residual_norm(z):
        # \| M M_l (b - A z) \|_{M^{-1}}
        return get_residual_and_norm(z)[2]

    def get_residual_and_norm(z):
        # Returns (M @ Ml @ r, Ml @ r, ||Ml r||_{M^{-1}}) for r = b - A z.
        Ml_r = Ml @ (b - A @ z)
        M_Ml_r = M @ Ml_r
        norm2 = inner(Ml_r, M_Ml_r)
        if np.any(norm2.imag != 0.0):
            raise ValueError("inner product <x, M x> gave nonzero imaginary part")
        norm2 = norm2.real
        return M_Ml_r, Ml_r, np.sqrt(norm2)

    b = np.asarray(b)
    assert len(A.shape) == 2
    assert A.shape[0] == A.shape[1]
    assert A.shape[1] == b.shape[0]

    M = Identity() if M is None else aslinearoperator(M)
    Ml = Identity() if Ml is None else aslinearoperator(Ml)
    Mr = Identity() if Mr is None else aslinearoperator(Mr)

    inner_is_none = inner is None
    inner = get_default_inner(b.shape) if inner is None else inner

    maxiter = A.shape[0] if maxiter is None else maxiter

    if x0 is None:
        x0 = np.zeros_like(b)
    x0 = np.asarray(x0)

    # get initial residual
    M_Ml_r0, Ml_r0, M_Ml_r0_norm = get_residual_and_norm(x0)

    Ml_A_Mr = Product(Ml, A, Mr)

    resnorms = [M_Ml_r0_norm]

    if callback is not None:
        callback(x0, Ml_r0)

    # initialize Arnoldi
    if ortho.startswith("mgs"):
        # "mgs" -> 1 reorthogonalization, "mgsN" -> N reorthogonalizations
        num_reorthos = 1 if len(ortho) == 3 else int(ortho[3:])
        arnoldi = ArnoldiMGS(
            Ml_A_Mr,
            Ml_r0,
            num_reorthos=num_reorthos,
            M=M,
            Mv=M_Ml_r0,
            Mv_norm=M_Ml_r0_norm,
            inner=inner,
        )
    else:
        assert ortho == "householder"
        # Householder orthogonalization supports neither a custom inner
        # product nor a nontrivial M.
        assert inner_is_none
        assert isinstance(M, Identity)
        arnoldi = ArnoldiHouseholder(Ml_A_Mr, Ml_r0)

    # Givens rotations:
    G = []
    # QR decomposition of Hessenberg matrix via Givens and R
    dtype = M_Ml_r0.dtype
    R = np.zeros([maxiter + 1, maxiter] + list(b.shape[1:]), dtype=dtype)
    y = np.zeros([maxiter + 1] + list(b.shape[1:]), dtype=dtype)
    # Right-hand side of projected system:
    y[0] = M_Ml_r0_norm
    yk = None
    xk = None

    # iterate Arnoldi
    k = 0
    success = False
    criterion = np.maximum(tol * resnorms[0], atol)
    while True:
        if np.all(resnorms[-1] <= criterion):
            # The cheap (updated) residual is small enough; verify against
            # the explicitly computed residual before declaring success.
            xk = _get_xk(yk) if xk is None else xk
            resnorms[-1] = get_residual_norm(xk)
            if np.all(resnorms[-1] <= criterion):
                success = True
                break
            # otherwise: updated residual was below tolerance but the
            # explicit residual is not -- keep iterating.

        if k == maxiter:
            break

        # V is used in _get_xk()
        _, h = next(arnoldi)

        # Copy new column from Arnoldi
        R[: k + 2, k] = h[: k + 2]

        # Apply previous Givens rotations.
        for i in range(k):
            R[i : i + 2, k] = multi_matmul(G[i], R[i : i + 2, k])

        # Compute and apply new Givens rotation.
        g, r = givens(R[k : k + 2, k])
        G.append(g)
        R[k, k] = r
        R[k + 1, k] = 0.0
        y[k : k + 2] = multi_matmul(G[k], y[k : k + 2])

        yk = y[: k + 1]
        resnorm = np.abs(y[k + 1])
        xk = None

        # make this a numpy array to give the callback the change to override it
        resnorm = np.array(resnorm)
        if callback is not None:
            xk = _get_xk(yk) if xk is None else xk
            callback(xk, resnorm)
        # convert back to scalar
        resnorm = resnorm[()]

        resnorms.append(resnorm)
        k += 1

    # compute solution if not yet done
    if xk is None:
        xk = _get_xk(y[: arnoldi.iter])

    num_operations = {
        "A": 1 + k,
        "M": 2 + k,
        "Ml": 2 + k,
        "Mr": 1 + k,
        "inner": 2 + k + k * (k + 1) / 2,
        "axpy": 4 + 2 * k + k * (k + 1) / 2,
    }

    return xk if success else None, Info(
        success, xk, k, resnorms, num_operations=num_operations
    )
|
jakejaehee/scala-webtool
|
scala-webtool/src/test/scala/test/StringTest.scala
|
package test
// Scratchpad exercising basic String and Map operations:
// span, split, and Option chaining via orElse/map.
object StringTest {
  def main(args: Array[String]) {
    val str = "abc.defg"
    // span splits at the first '.' -> ("abc", ".defg")
    val t = str.span { _ != '.' }
    println(t)
    str.split('.').foreach { println }
    val m = Map(1->2)
    // m.get(2) is None, so orElse falls back to m.get(1) = Some(2) -> Some("a2")
    println(m.get(2).orElse(m.get(1)).map("a" + _))
    // m.get(1) is Some(2) already, orElse is not consulted -> Some("a2")
    println(m.get(1).orElse(m.get(2)).map("a" + _))
  }
}
|
FuqiX/biosamples-v4
|
integration/src/main/java/uk/ac/ebi/biosamples/IntegrationProperties.java
|
<filename>integration/src/main/java/uk/ac/ebi/biosamples/IntegrationProperties.java
package uk.ac.ebi.biosamples;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.net.URI;
/**
 * Externalized endpoint configuration for the BioSamples integration tests.
 * All values are injected from properties and fall back to localhost defaults.
 */
@Component
public class IntegrationProperties {

    /** SampleTab submission endpoint. */
    @Value("${biosamples.submissionuri.sampletab:http://localhost:8082}")
    private URI biosampleSubmissionUriSampletab;

    /** Legacy XML API endpoint. */
    @Value("${biosamples.legacyxml.uri:http://localhost:8083}")
    private URI biosamplesLegacyXMLUri;

    /** Legacy JSON API endpoint. */
    @Value("${biosamples.legacyjson.uri:http://localhost:8084}")
    private URI biosamplesLegacyJSONUri;

    /** API key for the legacy services; null when not configured. */
    @Value("${biosamples.legacyapikey:#{null}}")
    private String legacyApiKey;

    public URI getBiosampleSubmissionUriSampleTab() {
        return biosampleSubmissionUriSampletab;
    }

    public URI getBiosamplesLegacyXMLUri() {
        return biosamplesLegacyXMLUri;
    }

    public URI getBiosamplesLegacyJSONUri() {
        return biosamplesLegacyJSONUri;
    }

    public String getLegacyApiKey() {
        return legacyApiKey;
    }
}
|
evidation-health/bokeh
|
bokeh/enums.py
|
<gh_stars>1-10
"""Definitions of common enumerations to be used together with ``Enum`` property. """
from __future__ import absolute_import
from six import string_types
from . import colors, icons, palettes
class Enumeration(object):
    """Base class for enumeration instances created by :func:`enumeration`.

    Concrete subclasses (built dynamically) provide ``_values`` (the list of
    allowed strings) and ``_case_sensitive`` (whether membership tests should
    lower-case the candidate first).
    """
    __slots__ = ()

    def __contains__(self, value):
        needle = value if self._case_sensitive else value.lower()
        return needle in self._values

    def __str__(self):
        return "Enumeration(%s)" % ", ".join(self._values)

    __repr__ = __str__
def enumeration(*values, **kwargs):
    """Build an :class:`Enumeration` instance from the given string values.

    Each value becomes an attribute mapping to itself; the first value is
    stored as ``_default``. Pass ``case_sensitive=False`` to make membership
    tests case-insensitive.
    """
    if not values or not all(isinstance(value, string_types) and value for value in values):
        raise ValueError("expected a non-empty sequence of strings, got %s" % values)

    if len(set(values)) != len(values):
        raise ValueError("enumeration items must be unique, got %s" % values)

    attrs = {value: value for value in values}
    attrs["_values"] = list(values)
    attrs["_default"] = values[0]
    attrs["_case_sensitive"] = kwargs.get("case_sensitive", True)

    return type("Enumeration", (Enumeration,), attrs)()
# --- line, font and text properties ---------------------------------------
LineJoin = enumeration("miter", "round", "bevel")
LineDash = enumeration("solid", "dashed", "dotted", "dotdash", "dashdot")
LineCap = enumeration("butt", "round", "square")
FontStyle = enumeration("normal", "italic", "bold")
TextAlign = enumeration("left", "right", "center")
TextBaseline = enumeration("top", "middle", "bottom", "alphabetic", "hanging")

# --- geometry and units ----------------------------------------------------
Direction = enumeration("clock", "anticlock")
Units = enumeration("screen", "data")
SpatialUnits = Units  # alias: spatial units are the same screen/data pair
AngleUnits = enumeration("deg", "rad")
DatetimeUnits = enumeration("microseconds", "milliseconds", "seconds", "minsec",
    "minutes", "hourmin", "hours", "days", "months", "years")
Dimension = enumeration("width", "height", "x", "y")

# --- placement and layout --------------------------------------------------
Anchor = enumeration("top_left", "top_center", "top_right", "right_center",
    "bottom_right", "bottom_center", "bottom_left", "left_center", "center")
Location = enumeration("above", "below", "left", "right")
Orientation = enumeration("top_right", "top_left", "bottom_left", "bottom_right")
DashPattern = enumeration("solid", "dashed", "dotted", "dotdash", "dashdot")

# --- widgets, colors, icons and palettes ----------------------------------
ButtonType = enumeration("default", "primary", "success", "warning", "danger", "link")
# color names are matched case-insensitively (e.g. "Red" == "red")
NamedColor = enumeration(*colors.__colors__, case_sensitive=False)
NamedIcon = enumeration(*icons.__icons__)
Palette = enumeration(*palettes.__palettes__)

# --- misc: maps, formatting, rendering, aggregation ------------------------
MapType = enumeration("satellite", "roadmap", "terrain", "hybrid")
DateFormat = enumeration("ATOM", "W3C", "RFC-3339", "ISO-8601", "COOKIE", "RFC-822",
    "RFC-850", "RFC-1036", "RFC-1123", "RFC-2822", "RSS", "TICKS", "TIMESTAMP")
RoundingFunction = enumeration("round", "nearest", "floor", "rounddown", "ceil", "roundup")
NumeralLanguage = enumeration("be-nl", "chs", "cs", "da-dk", "de-ch", "de", "en",
    "en-gb", "es-ES", "es", "et", "fi", "fr-CA", "fr-ch",
    "fr", "hu", "it", "ja", "nl-nl", "pl", "pt-br",
    "pt-pt", "ru", "ru-UA", "sk", "th", "tr", "uk-UA")
RenderLevel = enumeration("image", "underlay", "glyph", "annotation",
    "overlay", "tool")
Aggregation = enumeration("sum", "mean", "count", "nunique", "median", "min", "max")
|
jaguar-zc/flyants-oauth2
|
fly-servcie-core/src/main/java/io/sufeng/context/domain/service/impl/AuthorizeServiceImpl.java
|
package io.sufeng.context.domain.service.impl;
import io.sufeng.context.domain.entity.People;
import io.sufeng.context.domain.entity.oauth2.OAuthAccessToken;
import io.sufeng.context.domain.entity.oauth2.OAuthAuthorizeRequest;
import io.sufeng.context.domain.entity.oauth2.OAuthClient;
import io.sufeng.context.domain.entity.oauth2.OAuthUserAuthorize;
import io.sufeng.context.domain.repository.OAuthAccessTokenRepository;
import io.sufeng.context.domain.repository.OAuthClientRepository;
import io.sufeng.context.domain.repository.OAuthUserAuthorizeRepository;
import io.sufeng.context.domain.repository.OAuthorizeRequestRepository;
import io.sufeng.context.domain.service.AuthorizeService;
import io.sufeng.context.utils.ResourceUtils;
import org.apache.oltu.oauth2.as.issuer.MD5Generator;
import org.apache.oltu.oauth2.as.issuer.OAuthIssuerImpl;
import org.apache.oltu.oauth2.as.issuer.ValueGenerator;
import org.apache.oltu.oauth2.common.exception.OAuthSystemException;
import io.sufeng.common.exception.BusinessException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
import java.util.Optional;
/**
* @Author zhangchao
* @Date 2019/4/25 15:34
* @Version v1.0
*/
@Transactional
@Service
public class AuthorizeServiceImpl implements AuthorizeService {
private ValueGenerator generator = new MD5Generator();
@Autowired
private OAuthorizeRequestRepository authorizeRequestRepository;
@Autowired
private OAuthUserAuthorizeRepository oAuthUserAuthorizeRepository;
@Autowired
private OAuthAccessTokenRepository oAuthAccessTokenRepository;
@Autowired
private OAuthClientRepository OAuthClientRepository;
@Override
public boolean checkClientId(String clientId) {
return OAuthClientRepository.findById(clientId).isPresent();
}
@Override
public OAuthClient findOAuthClinetByClientId(String clientId) {
return OAuthClientRepository.findById(clientId).orElseThrow(() -> new BusinessException("clientId不存在"));
}
@Override
public OAuthUserAuthorize findOauthUserAuthorizeByClientIdAndUserId(String clientId, String userId) {
return oAuthUserAuthorizeRepository.findByClientIdAndUserId(clientId,userId).orElseThrow(() -> new BusinessException("用户不存在"));
}
@Override
public boolean checkClientSecret(String clientId,String clientSecret) {
return OAuthClientRepository.findByClientIdAndClientSecret(clientId,clientSecret).isPresent();
}
@Override
public boolean checkRedirectUri(String clientId,String redirectURI) {
return OAuthClientRepository.findByClientIdAndClientRedirectUriHost(clientId,redirectURI).isPresent();
}
@Override
public boolean checkAuthCode(String authCode) {
Optional<OAuthAuthorizeRequest> oAuthAuthorizeRequest = authorizeRequestRepository.findByAuthorizationCode(authCode)
.filter(i -> i.getAuthState() == 2)
.filter(i -> i.getStatus() == 0);
return oAuthAuthorizeRequest.isPresent();
}
@Override
public OAuthAuthorizeRequest authorization(String response_type, String client_id, String redirect_uri, String scope, String state) {
People currentPeople = ResourceUtils.getCurrentPeople();
Optional<OAuthClient> client = OAuthClientRepository.findById(client_id);
OAuthAuthorizeRequest request = new OAuthAuthorizeRequest();
request.setCreationDate(new Date());
request.setModificationDate(new Date());
request.setStatus(0);
request.setAuthState(2);
try {
request.setAuthorizationCode(new OAuthIssuerImpl(generator).authorizationCode());
} catch (OAuthSystemException e) {
e.printStackTrace();
}
request.setUserId(currentPeople.getId());
request.setRedirectUri(redirect_uri);
request.setResponseType(response_type);
request.setState(state);
request.setClientId(client_id);
authorizeRequestRepository.saveAndFlush(request);
Optional<OAuthUserAuthorize> oAuthUserAuthorize = oAuthUserAuthorizeRepository.findByClientIdAndUserId(client_id, currentPeople.getId());
if(!oAuthUserAuthorize.isPresent()){
OAuthUserAuthorize oAuthUserAuthorize1 = oAuthUserAuthorize.orElseGet(() -> {
OAuthUserAuthorize userAuthorize = new OAuthUserAuthorize();
userAuthorize.setClientId(client_id);
try {
userAuthorize.setOauthUserId(new OAuthIssuerImpl(generator).authorizationCode());
} catch (OAuthSystemException e) {
e.printStackTrace();
}
userAuthorize.setUserId(currentPeople.getId());
userAuthorize.setOauthUserName(currentPeople.getNickName());
userAuthorize.setAuthorizeResource(client.get().getOAuthClientResource().getResource());
return userAuthorize;
});
oAuthUserAuthorizeRepository.saveAndFlush(oAuthUserAuthorize1);
}
return request;
}
@Override
public boolean checkAccessToken(String accessToken) {
return oAuthAccessTokenRepository.findByToken(accessToken).isPresent();
}
@Override
public Optional<String> findPeopleIdByOpenId(String accessToken, String openId) {
Optional<OAuthAccessToken> optionalOAuthAccessToken = oAuthAccessTokenRepository.findByToken(accessToken);
if(optionalOAuthAccessToken.isPresent()){
String clientId = optionalOAuthAccessToken.get().getClientId();
Optional<OAuthUserAuthorize> oAuthUserAuthorize = oAuthUserAuthorizeRepository.findByClientIdAndOauthUserId(clientId, openId);
oAuthUserAuthorize.orElseThrow(()->new BusinessException("error openId!"));
return Optional.of(oAuthUserAuthorize.get().getUserId());
}
return Optional.empty();
}
@Override
public OAuthAccessToken generatorAccessToken(String clientId, String code) {
OAuthIssuerImpl oAuthIssuer = new OAuthIssuerImpl(generator);
Optional<OAuthAuthorizeRequest> optional = authorizeRequestRepository.findByAuthorizationCode(code);
OAuthAuthorizeRequest oAuthAuthorizeRequest = optional.orElseThrow(() -> new BusinessException("code不存在"));
optional.filter(i -> i.getAuthState() == 2).orElseThrow(() -> new BusinessException("未同意授权"));
optional.filter(i -> i.getStatus() == 0).orElseThrow(() -> new BusinessException("code 已使用"));
oAuthAuthorizeRequest.setStatus(1);
authorizeRequestRepository.saveAndFlush(oAuthAuthorizeRequest);
Optional<OAuthUserAuthorize> oAuthUserAuthorize = oAuthUserAuthorizeRepository.findByClientIdAndUserId(clientId, optional.get().getUserId());
OAuthAccessToken oAuthAccessToken = null;
try {
oAuthAccessToken = new OAuthAccessToken();
oAuthAccessToken.setCreationDate(new Date());
oAuthAccessToken.setModificationDate(new Date());
oAuthAccessToken.setExpires(3600);
oAuthAccessToken.setRefreshToken(oAuthIssuer.refreshToken());
oAuthAccessToken.setResourceOwnerId(oAuthUserAuthorize.get().getOauthUserId());
oAuthAccessToken.setToken(oAuthIssuer.accessToken());
oAuthAccessToken.setClientId(clientId);
} catch (OAuthSystemException e) {
e.printStackTrace();
}
oAuthAccessTokenRepository.saveAndFlush(oAuthAccessToken);
return oAuthAccessToken;
}
}
|
YeeZC/java-profiler
|
profiler-operator/src/main/java/me/zyee/java/profiler/flame/FlameNodeTask.java
|
package me.zyee.java.profiler.flame;
import org.apache.commons.lang3.StringUtils;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.RecursiveTask;
import java.util.function.Predicate;
/**
 * Fork/join task that parses an HTML flame-graph fragment (a jsoup
 * {@link Elements} list of frame entries) into a {@link FlameNode} tree,
 * keeping only frames whose name matches {@code predicate} and whose
 * percentage is at least {@code min}.
 * <p>
 * Fix: replaced every in-{@code compute()} {@code fork().join()} with
 * {@code invoke()}.  Forking and immediately joining parks the current
 * ForkJoin worker while another thread runs the subtask; {@code invoke()}
 * executes it in place with an identical result.
 *
 * @author yee
 * @version 1.0
 * Create by yee on 2021/1/14
 */
class FlameNodeTask extends RecursiveTask<FlameNode> {
    /** Frames below this percentage are dropped. */
    private final double min;
    /** HTML frame entries to parse. */
    private final Elements elements;
    /** Node collecting children parsed from {@link #elements}. */
    private final FlameNode root;
    /** Selects frame names that become explicit nodes. */
    private final Predicate<String> predicate;

    FlameNodeTask(double min, Elements elements, Predicate<String> predicate) {
        this(min, elements, null, predicate);
    }

    FlameNodeTask(double min, Elements elements, FlameNode root, Predicate<String> predicate) {
        this.min = min;
        this.elements = elements;
        this.predicate = predicate;
        if (null != root) {
            this.root = root;
        } else {
            this.root = new FlameNode();
        }
    }

    @Override
    protected FlameNode compute() {
        for (Element element : elements) {
            Optional.ofNullable(element.selectFirst("span")).ifPresent(el -> {
                final String span = el.text().replace("/", ".");
                if (predicate.test(span)) {
                    // div text appears to look like
                    // "[n] 3.45% 1,234 self: 0.12% 56" — TODO confirm against
                    // the profiler's actual HTML output.
                    final String div = element.selectFirst("div").text();
                    final double percent = Double.parseDouble(StringUtils.substringBetween(div, "] ", "%"));
                    if (percent < min) {
                        return;  // below threshold: skip this subtree entry
                    }
                    final FlameNode node = new FlameNode();
                    node.setName(span);
                    final String count = StringUtils.substringBetween(div, "% ", " self");
                    final String selfPercent = StringUtils.substringBetween(div, ": ", "%");
                    final String selfCount = StringUtils.substringAfterLast(div, "% ");
                    node.setPercent(percent);
                    node.setCount(Long.parseLong(count.replace(",", "")));
                    node.setSelfPercent(Double.parseDouble(selfPercent));
                    node.setSelfCount(Long.parseLong(selfCount.replace(",", "")));
                    root.add(makeChildFlameNode(node, element));
                } else {
                    // Name filtered out: splice its children directly into root.
                    makeChildFlameNode(root, element);
                }
            });
        }
        return new SimpleTask(root).invoke();
    }

    /** Recursively parses the nested &lt;ul&gt; (if any) into {@code node}. */
    private FlameNode makeChildFlameNode(FlameNode node, Element element) {
        final Element ul = element.selectFirst("ul");
        if (null != ul) {
            final FlameNodeTask task = new FlameNodeTask(min, ul.children(), node, predicate);
            return task.invoke();
        }
        return node;
    }

    /** Collapses children that share their parent's name into the parent. */
    private static class SimpleTask extends RecursiveTask<FlameNode> {
        private final FlameNode node;

        SimpleTask(FlameNode node) {
            this.node = node;
        }

        @Override
        protected FlameNode compute() {
            final List<FlameNode> children = node.getChildren();
            List<FlameNode> result = new ArrayList<>();
            boolean match = false;
            for (FlameNode child : children) {
                if (StringUtils.equals(child.getName(), node.getName())) {
                    // Same name as parent: merge by adopting its children.
                    match = true;
                    result.addAll(child.getChildren());
                } else {
                    result.add(new SimpleTask(child).invoke());
                }
            }
            node.setChildren(result);
            if (match) {
                // Adopted grandchildren may again share the name; re-run.
                return new SimpleTask(node).invoke();
            }
            return node;
        }
    }
}
|
samhatchett/svelte
|
test/runtime/samples/option-without-select/_config.js
|
// Runtime test config: an <option> rendered outside any <select> must still
// get its value attribute from the bound data and update it on set().
export default {
	data: {
		foo: 'hello'
	},
	html: `<option value='hello'>hello</option>`,
	test(assert, component, target) {
		// Changing the bound value must update both the value attribute
		// and the option's text content.
		component.set({ foo: 'goodbye' });
		assert.htmlEqual(target.innerHTML, `
			<option value='goodbye'>goodbye</option>
		`);
	}
};
|
HoweChen/we-cmdb
|
cmdb-core/src/main/java/com/webank/cmdb/dto/EnumInfoResponse.java
|
<gh_stars>1-10
package com.webank.cmdb.dto;
import java.util.LinkedList;
import java.util.List;
/**
 * Response DTO carrying one page of enum definitions together with the
 * paging metadata for that slice.
 */
public class EnumInfoResponse {
    /** Paging metadata; null when the response is not paginated. */
    private PageInfo pageInfo;
    /** Enum entries of the current page; starts out empty, never null. */
    private List<EnumInfo> enumInfos = new LinkedList<>();

    public PageInfo getPageInfo() {
        return this.pageInfo;
    }

    public void setPageInfo(PageInfo info) {
        this.pageInfo = info;
    }

    public List<EnumInfo> getEnumInfos() {
        return this.enumInfos;
    }

    public void setEnumInfos(List<EnumInfo> infos) {
        this.enumInfos = infos;
    }
}
|
ahxm/cms
|
src/com/jeecms/cms/statistic/workload/CmsWorkLoadStatisticDao.java
|
package com.jeecms.cms.statistic.workload;
import java.util.Date;
import com.jeecms.cms.statistic.workload.CmsWorkLoadStatistic.CmsWorkLoadStatisticDateKind;
/**
 * DAO for CMS workload statistics.
 */
public interface CmsWorkLoadStatisticDao {
	/**
	 * Counts workload matching the given filters, grouped by the supplied
	 * date granularity.
	 *
	 * @param channelId  channel filter; presumably null means all channels — TODO confirm
	 * @param reviewerId reviewer filter
	 * @param authorId   author filter
	 * @param beginDate  start of the reporting interval
	 * @param endDate    end of the reporting interval
	 * @param dateKind   date granularity of the statistic
	 * @return the computed workload count
	 */
	public Long statistic(Integer channelId,
			Integer reviewerId, Integer authorId, Date beginDate, Date endDate,CmsWorkLoadStatisticDateKind dateKind);
}
|
prepare/spice3f5
|
src/lib/dev/cap/capacld.c
|
/**********
Copyright 1990 Regents of the University of California. All rights reserved.
Author: 1985 <NAME>
**********/
/*
*/
#include "spice.h"
#include <stdio.h>
#include "cktdefs.h"
#include "capdefs.h"
#include "sperror.h"
#include "suffix.h"
/*
 * CAPacLoad - load every capacitor into the AC small-signal matrix.
 * The admittance of a capacitor is j*omega*C; the value omega*C is added
 * on the diagonal entries and subtracted on the off-diagonals.  The "+1"
 * pointer offset selects the imaginary part of the complex matrix entry
 * (SPICE3 complex-matrix layout — confirm against smpdefs.h).
 */
int
CAPacLoad(inModel,ckt)
GENmodel *inModel;
register CKTcircuit *ckt;
{
    register CAPmodel *model = (CAPmodel*)inModel;
    double val;
    register CAPinstance *here;
    /* walk every capacitor model, and every instance of each model */
    for( ; model != NULL; model = model->CAPnextModel) {
        for( here = model->CAPinstances;here != NULL;
                here = here->CAPnextInstance) {
            /* susceptance of this instance at the analysis frequency */
            val = ckt->CKTomega * here->CAPcapac;
            *(here->CAPposPosptr +1) += val;
            *(here->CAPnegNegptr +1) += val;
            *(here->CAPposNegptr +1) -= val;
            *(here->CAPnegPosptr +1) -= val;
        }
    }
    return(OK);
}
|
comprakt/comprakt-fuzz-tests
|
output/97f607efdeef438ebbcc6684f15a702b.java
|
<gh_stars>0
class YCg {
}
class UUvuif {
}
class CY {
public xp mhS0bmR4KPOSL;
public boolean Aw;
public boolean[] MuELWLf1sqLO () throws yn1js5Ur3Osvn {
if ( !false[ true.n84nX()]) while ( -new int[ 8.Zp].BhaCNfyGb()) {
( -!950[ 587.Z_rYI2DkTEraA]).hgks2pdIWL96jC;
}else true[ new x0USz()[ new boolean[ true.Q9Wfw()][ new lMjeyBID8Ntv().G2]]];
;
boolean cubnoeH;
int[] rePM7lRZK2 = true.LLn5_PuX();
;
void eAoX4we_ = SBxvSk[ this.vm] = --true[ -!FzMLx1e().ZdQ4()];
void[][] T;
while ( this.i3qfXgo4uFvUk()) return;
;
void[][][][][] rIlOTKuCO8a0;
uCz2s.P2j0dBjMl584F;
if ( !--( --!-!null.w5wMzr4Y()).zcdEV25YzKQetk()) !new j0J().tC7E0cu;else while ( !( 4324.sxCMeRCFOSx())[ -!!-lWRw.ZBRsXl7Klur5]) {
void[][][][] _1;
}
boolean EWi = !BtQ12z0T()._uqp6JxRq = !new int[ !!684.P7()].r5gk();
_p5M6rnxYmkp JBI7EZXH79q3 = nliBiMmD9[ ( -new a4dXlis5i3w()[ this.mP_nLmR()])[ -this.t]] = RYNgGLLnjuwL_[ ZhBi.hIVFxFx];
hDNlDM88bQp[] BR = --!new boolean[ i5Rx().I1qockMWgY][ -!null[ new int[ !AkjiXi().i8ra()][ 1268[ !!-new CLLob2()[ -!new VAC[ this.p7G44fpP].by6Qg()]]]]] = false.TQTzrcsGb;
int[] gEZ6C8;
void[][][] olKd1r = y7oFwb8.jumL4Qg;
}
public boolean SqnYclOjX0M (void[][][] qkA) {
return;
new int[ null.mgf5xz388()].z0lI8;
-!!true.p;
return !true[ pqrz().GW];
void ODXOjc8DpI5BD;
while ( new YkH3l_6u().yFrxVQzn) ;
boolean _9UzN8moq2bll = 949305756[ !new int[ !-!( Rl8x492nu().D())[ -this.DfBWSu]][ new WVOMfj().mP2RMPDj]];
;
if ( h[ !!-this.d_MKBWVeSWM8gQ]) ;
!!2.M_M9qV();
}
public int di (x3BNQORuL9d V_1_nFzaXVG7Nt, void[] IIOE) throws LBohuBuxOqTPm {
-2413833.pmT67jn;
Z[][] tH = Tq6G()[ !1530[ -----!-!new OFPukDuB1KIwN().YcRk]] = this.FziZ;
int[][][][][][][][][][] Dy6WWOS3D8;
;
return 076[ 499349733[ kGjKH5WZ()._b64tvDb6()]];
;
if ( --!RPZvWYU.RjiZtVZha0XUAc) new HTMw4Z6Tva8Wc().Iat5VXZjWq63i();
return;
}
public static void orTB3ClmL (String[] fsCO2) throws J_HhyYuh {
dNf7vOZfFMBeA.N;
int[][] kW;
boolean[] vXpInRDw34e = !-I3yci[ -!!( false[ false.boffMczIEi()]).rIWKhl8eK()];
void[][] R;
void j4PPCdjX7YRr6 = !new b()[ !true.vRWxy431Yk()] = this.m84k3X;
if ( j().x8xB4PqQFEjgFI) ;else new dOZGtZslQtGZ().DG_kVbj5vv;
;
;
{
void ob6FblP2;
if ( !false.kLn) {
while ( -( this.vI_2O()).cUR) ;
}
boolean y;
tkJ d7X;
void[] gpcADPup;
int[][][] BIbMt4;
}
return;
if ( Sw().R) while ( new ZDh4DCy0O()[ this[ -( !!true[ !736905705.kKJjxLknK]).XClJF]]) ;else {
return;
}
{
--null.OW1RGd;
while ( sdVwCcyv.Bw()) return;
;
boolean S_cF1;
}
return;
void Lf8h = false.l7EjTtH = --null.F0VsKMR();
int[] ljJ5W8r7 = -( -this[ 07341183.MCSr_YSF81hL()])[ !!!-!gq().ZFUt()] = -this[ new void[ new int[ !-null.Pvae4oIc].chDB8hc7].byt_2qvN7()];
void tJ;
;
}
}
|
sigma-libra/Dominion
|
shared/src/main/java/shared/domain/effect/cardaction/impl/TrashCardsAction.java
|
<reponame>sigma-libra/Dominion
package shared.domain.effect.cardaction.impl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import shared.domain.cards.Card;
import shared.domain.effect.cardaction.CardAction;
import shared.domain.engine.GameState;
import shared.domain.engine.Player;
import shared.domain.exceptions.GameException;
import java.io.Serializable;
import java.lang.invoke.MethodHandles;
import java.util.List;
/**
 * CardAction: trashes the chosen cards.
 * <p>
 * Intentionally a no-op: by the time this action runs, the CardSource has
 * already removed the chosen cards, which is all "trashing" requires here.
 */
public class TrashCardsAction implements CardAction, Serializable {
    // Logger resolved via MethodHandles so it survives class renames/copies.
    private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
    public void execute(GameState gameState, Player player, List<Card> chosenCards) throws GameException {
        LOG.info("execute - TrashCardsAction");
        // the CardSource has already removed the cards. We have nothing to do.
    }
}
|
sebschrader/debian-pkg-isc-kea
|
src/bin/perfdhcp/tests/perf_pkt6_unittest.cc
|
<gh_stars>0
// Copyright (C) 2012-2015 Internet Systems Consortium, Inc. ("ISC")
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
#include <config.h>
#include <iostream>
#include <sstream>
#include <arpa/inet.h>
#include <gtest/gtest.h>
#include <asiolink/io_address.h>
#include <dhcp/option.h>
#include <dhcp/dhcp6.h>
#include <boost/scoped_ptr.hpp>
#include "../localized_option.h"
#include "../perf_pkt6.h"
using namespace std;
using namespace isc;
using namespace isc::dhcp;
using namespace isc::perfdhcp;
typedef PerfPkt6::LocalizedOptionPtr LocalizedOptionPtr;
namespace {
/// Test fixture for PerfPkt6, the perfdhcp wrapper around a raw DHCPv6
/// packet buffer that packs/unpacks options and transaction ids at fixed
/// byte offsets.
class PerfPkt6Test : public ::testing::Test {
public:
    PerfPkt6Test() {
        // No shared fixture state: each test builds its own packets.
    }
    /// \brief Returns captured SOLICIT packet.
    ///
    /// Captured SOLICIT packet with transid=0x3d79fb and options: client-id,
    /// in_na, dns-server, elapsed-time, option-request
    /// This code was autogenerated
    /// (see src/bin/dhcp6/tests/iface_mgr_unittest.c),
    /// but we spent some time to make it less ugly than it used to be.
    ///
    /// \return pointer to Pkt6 that represents received SOLICIT
    /// (caller owns the returned object)
    PerfPkt6* capture() {
        uint8_t data[98];
        data[0] = 1;
        data[1] = 1; data[2] = 2; data[3] = 3; data[4] = 0;
        data[5] = 1; data[6] = 0; data[7] = 14; data[8] = 0;
        data[9] = 1; data[10] = 0; data[11] = 1; data[12] = 21;
        data[13] = 158; data[14] = 60; data[15] = 22; data[16] = 0;
        data[17] = 30; data[18] = 140; data[19] = 155; data[20] = 115;
        data[21] = 73; data[22] = 0; data[23] = 3; data[24] = 0;
        data[25] = 40; data[26] = 0; data[27] = 0; data[28] = 0;
        data[29] = 1; data[30] = 255; data[31] = 255; data[32] = 255;
        data[33] = 255; data[34] = 255; data[35] = 255; data[36] = 255;
        data[37] = 255; data[38] = 0; data[39] = 5; data[40] = 0;
        data[41] = 24; data[42] = 32; data[43] = 1; data[44] = 13;
        data[45] = 184; data[46] = 0; data[47] = 1; data[48] = 0;
        data[49] = 0; data[50] = 0; data[51] = 0; data[52] = 0;
        data[53] = 0; data[54] = 0; data[55] = 0; data[56] = 18;
        data[57] = 52; data[58] = 255; data[59] = 255; data[60] = 255;
        data[61] = 255; data[62] = 255; data[63] = 255; data[64] = 255;
        data[65] = 255; data[66] = 0; data[67] = 23; data[68] = 0;
        data[69] = 16; data[70] = 32; data[71] = 1; data[72] = 13;
        data[73] = 184; data[74] = 0; data[75] = 1; data[76] = 0;
        data[77] = 0; data[78] = 0; data[79] = 0; data[80] = 0;
        data[81] = 0; data[82] = 0; data[83] = 0; data[84] = 221;
        data[85] = 221; data[86] = 0; data[87] = 8; data[88] = 0;
        data[89] = 2; data[90] = 0; data[91] = 100; data[92] = 0;
        data[93] = 6; data[94] = 0; data[95] = 2; data[96] = 0;
        data[97] = 23;
        PerfPkt6* pkt = new PerfPkt6(data, sizeof(data));
        return (pkt);
    }
    /// \brief Returns truncated SOLICIT packet.
    ///
    /// Returns truncated SOLICIT packet which will be used for
    /// negative tests: e.g. pack options out of packet.
    ///
    /// \return pointer to Pkt6 that represents truncated SOLICIT
    /// (caller owns the returned object)
    PerfPkt6* captureTruncated() {
        uint8_t data[17];
        data[0] = 1;
        data[1] = 1; data[2] = 2; data[3] = 3; data[4] = 0;
        data[5] = 1; data[6] = 0; data[7] = 14; data[8] = 0;
        data[9] = 1; data[10] = 0; data[11] = 1; data[12] = 21;
        data[13] = 158; data[14] = 60; data[15] = 22; data[16] = 0;
        PerfPkt6* pkt = new PerfPkt6(data, sizeof(data));
        return (pkt);
    }
};
// Both constructor flavours must preserve the raw buffer and record the
// transaction id offset (and, for outgoing packets, the transaction id).
TEST_F(PerfPkt6Test, Constructor) {
    // Data to be used to create packet.
    uint8_t data[] = { 0, 1, 2, 3, 4, 5 };
    // Test constructor to be used for incoming messages.
    // Use default (1) offset value and don't specify transaction id.
    boost::scoped_ptr<PerfPkt6> pkt1(new PerfPkt6(data, sizeof(data)));
    EXPECT_EQ(sizeof(data), pkt1->data_.size());
    EXPECT_EQ(0, memcmp(&pkt1->data_[0], data, sizeof(data)));
    EXPECT_EQ(1, pkt1->getTransidOffset());
    // Test constructor to be used for outgoing messages.
    // Use non-zero offset and specify transaction id.
    const size_t offset_transid = 10;
    const uint32_t transid = 0x010203;
    boost::scoped_ptr<PerfPkt6> pkt2(new PerfPkt6(data, sizeof(data),
                                                  offset_transid, transid));
    EXPECT_EQ(sizeof(data), pkt2->data_.size());
    EXPECT_EQ(0, memcmp(&pkt2->data_[0], data, sizeof(data)));
    EXPECT_EQ(0x010203, pkt2->getTransid());
    EXPECT_EQ(10, pkt2->getTransidOffset());
}
// Round-trip: options written at fixed offsets by rawPack() must be read
// back identically by rawUnpack() on a second packet built from the buffer.
TEST_F(PerfPkt6Test, RawPackUnpack) {
    // Create first packet.
    boost::scoped_ptr<PerfPkt6> pkt1(capture());
    // Create some input buffers to initialize options.
    uint8_t buf_elapsed_time[] = { 1, 1 };
    uint8_t buf_duid[] = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14 };
    // Create options.
    const size_t offset_elapsed_time = 86;
    OptionBuffer vec_elapsed_time(buf_elapsed_time,
                                  buf_elapsed_time + sizeof(buf_elapsed_time));
    LocalizedOptionPtr pkt1_elapsed_time(new LocalizedOption(Option::V6,
                                                             D6O_ELAPSED_TIME,
                                                             vec_elapsed_time,
                                                             offset_elapsed_time));
    const size_t offset_duid = 4;
    OptionBuffer vec_duid(buf_duid, buf_duid + sizeof(buf_duid));
    LocalizedOptionPtr pkt1_duid(new LocalizedOption(Option::V6,
                                                     D6O_CLIENTID,
                                                     vec_duid,
                                                     offset_duid));
    // Add option to packet and create on-wire format from added options.
    // Contents of options will override contents of packet buffer.
    ASSERT_NO_THROW(pkt1->addOption(pkt1_elapsed_time));
    ASSERT_NO_THROW(pkt1->addOption(pkt1_duid));
    ASSERT_TRUE(pkt1->rawPack());
    // Reset so as we can reuse them for another packet.
    vec_elapsed_time.clear();
    vec_duid.clear();
    // Get output buffer from packet 1 to create new packet
    // that will be later validated.
    util::OutputBuffer pkt1_output = pkt1->getBuffer();
    ASSERT_EQ(pkt1_output.getLength(), pkt1->data_.size());
    const uint8_t* pkt1_output_data = static_cast<const uint8_t*>
        (pkt1_output.getData());
    boost::scoped_ptr<PerfPkt6> pkt2(new PerfPkt6(pkt1_output_data,
                                                  pkt1_output.getLength()));
    // Create objects specifying options offset in a packet.
    // Offsets will inform pkt2 object where to read data from.
    LocalizedOptionPtr pkt2_elapsed_time(new LocalizedOption(Option::V6,
                                                             D6O_ELAPSED_TIME,
                                                             vec_elapsed_time,
                                                             offset_elapsed_time));
    LocalizedOptionPtr pkt2_duid(new LocalizedOption(Option::V6,
                                                     D6O_CLIENTID,
                                                     vec_duid,
                                                     offset_duid));
    // Add options to packet to pass their offsets.
    pkt2->addOption(pkt2_elapsed_time);
    pkt2->addOption(pkt2_duid);
    // Unpack: get relevant parts of buffer data into option objects.
    ASSERT_TRUE(pkt2->rawUnpack());
    // Once option data is stored in options objects we pull it out.
    pkt2_elapsed_time = boost::dynamic_pointer_cast<LocalizedOption>
        (pkt2->getOption(D6O_ELAPSED_TIME));
    pkt2_duid = boost::dynamic_pointer_cast<LocalizedOption>
        (pkt2->getOption(D6O_CLIENTID));
    // Check if options are present. They have to be there since
    // we have added them ourselves.
    ASSERT_TRUE(pkt2_elapsed_time);
    ASSERT_TRUE(pkt2_duid);
    // Expecting option contents be the same as original.
    OptionBuffer pkt2_elapsed_time_data = pkt2_elapsed_time->getData();
    OptionBuffer pkt2_duid_data = pkt2_duid->getData();
    EXPECT_EQ(0x0101, pkt2_elapsed_time->getUint16());
    EXPECT_TRUE(std::equal(pkt2_duid_data.begin(),
                           pkt2_duid_data.end(),
                           buf_duid));
}
// Negative case: rawPack() must refuse options whose offset (or whose
// tail, given the option length) falls outside the packet buffer.
TEST_F(PerfPkt6Test, InvalidOptions) {
    // Create packet.
    boost::scoped_ptr<PerfPkt6> pkt1(capture());
    OptionBuffer vec_server_id;
    vec_server_id.resize(10);
    // Testing invalid offset of the option (greater than packet size)
    const size_t offset_serverid[] = { 150, 85 };
    LocalizedOptionPtr pkt1_serverid(new LocalizedOption(Option::V6,
                                                         D6O_SERVERID,
                                                         vec_server_id,
                                                         offset_serverid[0]));
    pkt1->addOption(pkt1_serverid);
    // Pack has to fail due to invalid offset.
    EXPECT_FALSE(pkt1->rawPack());
    // Create packet.
    boost::scoped_ptr<PerfPkt6> pkt2(capture());
    // Testing offset of the option (lower than packet size but
    // tail of the option out of bounds).
    LocalizedOptionPtr pkt2_serverid(new LocalizedOption(Option::V6,
                                                         D6O_SERVERID,
                                                         vec_server_id,
                                                         offset_serverid[1]));
    pkt2->addOption(pkt2_serverid);
    // Pack must fail due to invalid offset.
    EXPECT_FALSE(pkt2->rawPack());
}
// Negative case: both pack and unpack must fail when the option extends
// past the end of a truncated packet buffer.
TEST_F(PerfPkt6Test, TruncatedPacket) {
    cout << "Testing parsing options from truncated packet."
         << "This may produce spurious errors" << endl;
    // Create truncated (in the middle of duid options)
    boost::scoped_ptr<PerfPkt6> pkt1(captureTruncated());
    OptionBuffer vec_duid;
    vec_duid.resize(30);
    const size_t offset_duid = 4;
    LocalizedOptionPtr pkt1_duid(new LocalizedOption(Option::V6,
                                                     D6O_CLIENTID,
                                                     vec_duid,
                                                     offset_duid));
    pkt1->addOption(pkt1_duid);
    // Pack/unpack must fail because length of the option read from buffer
    // will extend over the actual packet length.
    EXPECT_FALSE(pkt1->rawUnpack());
    EXPECT_FALSE(pkt1->rawPack());
}
// rawPack() must write the stored transaction id at the configured offset,
// and reject an offset beyond the buffer.
TEST_F(PerfPkt6Test, PackTransactionId) {
    uint8_t data[100];
    memset(&data, 0, sizeof(data));
    const size_t offset_transid[] = { 50, 100 };
    const uint32_t transid = 0x010203;
    // Create dummy packet that is simply filled with zeros.
    boost::scoped_ptr<PerfPkt6> pkt1(new PerfPkt6(data,
                                                  sizeof(data),
                                                  offset_transid[0],
                                                  transid));
    // Reference data are non zero so we can detect them in dummy packet.
    uint8_t ref_data[3] = { 1, 2, 3 };
    // This will store given transaction id in the packet data at
    // offset of 50.
    ASSERT_TRUE(pkt1->rawPack());
    // Get the output buffer so we can validate it.
    util::OutputBuffer out_buf = pkt1->getBuffer();
    ASSERT_EQ(sizeof(data), out_buf.getLength());
    const uint8_t *out_buf_data = static_cast<const uint8_t*>
        (out_buf.getData());
    // Validate transaction id.
    EXPECT_EQ(0, memcmp(out_buf_data + offset_transid[0], ref_data, 3));
    // Out of bounds transaction id offset.
    boost::scoped_ptr<PerfPkt6> pkt2(new PerfPkt6(data,
                                                  sizeof(data),
                                                  offset_transid[1],
                                                  transid));
    cout << "Testing out of bounds offset. "
            "This may produce spurious errors ..." << endl;
    EXPECT_FALSE(pkt2->rawPack());
}
// rawUnpack() must read the transaction id from the configured offset,
// and reject an offset beyond the buffer.
TEST_F(PerfPkt6Test, UnpackTransactionId) {
    // Initialize data for dummy packet (zeros only).
    uint8_t data[100] = { 0 };
    // Generate transaction id = 0x010203 and inject at offset = 50.
    for (uint8_t i = 50; i < 53; ++i) {
        data[i] = i - 49;
    }
    // Create packet and point out that transaction id is at offset 50.
    const size_t offset_transid[] = { 50, 300 };
    boost::scoped_ptr<PerfPkt6> pkt1(new PerfPkt6(data,
                                                  sizeof(data),
                                                  offset_transid[0]));
    // Get transaction id out of buffer and store in class member.
    ASSERT_TRUE(pkt1->rawUnpack());
    // Test value of transaction id.
    EXPECT_EQ(0x010203, pkt1->getTransid());
    // Out of bounds transaction id offset.
    boost::scoped_ptr<PerfPkt6> pkt2(new PerfPkt6(data,
                                                  sizeof(data),
                                                  offset_transid[1]));
    cout << "Testing out of bounds offset. "
            "This may produce spurious errors ..." << endl;
    EXPECT_FALSE(pkt2->rawUnpack());
}
}
|
anatawa12/intellij-community
|
plugins/lombok/src/main/java/de/plushnikov/intellij/plugin/psi/LombokLightClassBuilder.java
|
<reponame>anatawa12/intellij-community
package de.plushnikov.intellij.plugin.psi;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.impl.ElementPresentationUtil;
import com.intellij.psi.impl.light.LightPsiClassBuilder;
import com.intellij.psi.impl.source.PsiExtensibleClass;
import com.intellij.ui.IconManager;
import com.intellij.ui.icons.RowIcon;
import icons.LombokIcons;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.function.Supplier;
import java.util.stream.Stream;
/**
 * Fluent builder for a synthetic ("light") PSI class that Lombok generates
 * (e.g. builder classes).  Fields and methods are supplied lazily through
 * suppliers and materialized on first access.
 */
public class LombokLightClassBuilder extends LightPsiClassBuilder implements PsiExtensibleClass, SyntheticElement {
  private final String myQualifiedName;
  private final Icon myBaseIcon;
  private final LombokLightModifierList myModifierList;
  private boolean myIsEnum;
  // Lazily materialized caches; null until the matching supplier runs.
  private PsiField[] myFields;
  private PsiMethod[] myMethods;
  // Suppliers default to "no members"; replaced via withFieldSupplier/withMethodSupplier.
  private Supplier<? extends Collection<PsiField>> fieldSupplier = Collections::emptyList;
  private Supplier<? extends Collection<PsiMethod>> methodSupplier = Collections::emptyList;
  public LombokLightClassBuilder(@NotNull PsiElement context, @NotNull String simpleName, @NotNull String qualifiedName) {
    super(context, simpleName);
    myIsEnum = false;
    myQualifiedName = qualifiedName;
    myBaseIcon = LombokIcons.Nodes.LombokClass;
    myModifierList = new LombokLightModifierList(context.getManager(), context.getLanguage());
  }
  @NotNull
  @Override
  public LombokLightModifierList getModifierList() {
    return myModifierList;
  }
  @Override
  public PsiElement getScope() {
    // Delegate to the containing class when nested; fall back otherwise.
    if (getContainingClass() != null) {
      return getContainingClass().getScope();
    }
    return super.getScope();
  }
  @Override
  public PsiElement getParent() {
    return getContainingClass();
  }
  @Nullable
  @Override
  public String getQualifiedName() {
    return myQualifiedName;
  }
  @Override
  public Icon getElementIcon(final int flags) {
    // Lombok class icon layered with element flags plus a visibility badge.
    RowIcon baseIcon = IconManager.getInstance().createLayeredIcon(this, myBaseIcon,
                                                                   ElementPresentationUtil.getFlags(this, false));
    return ElementPresentationUtil.addVisibilityIcon(this, flags, baseIcon);
  }
  @Override
  public TextRange getTextRange() {
    // Synthetic elements may have no range; never return null to callers.
    TextRange r = super.getTextRange();
    return r == null ? TextRange.EMPTY_RANGE : r;
  }
  @Override
  public PsiFile getContainingFile() {
    if (null != getContainingClass()) {
      return getContainingClass().getContainingFile();
    }
    return super.getContainingFile();
  }
  @Override
  public boolean isEnum() {
    return myIsEnum;
  }
  @Override
  public PsiField @NotNull [] getFields() {
    // Materialize once, then drop the supplier so it cannot run twice.
    // NOTE(review): this lazy init is not synchronized — assumed to run on
    // a single (read-action) thread; confirm against PSI threading rules.
    if (null == myFields) {
      Collection<PsiField> generatedFields = fieldSupplier.get();
      myFields = generatedFields.toArray(PsiField.EMPTY_ARRAY);
      fieldSupplier = Collections::emptyList;
    }
    return myFields;
  }
  @Override
  public PsiMethod @NotNull [] getMethods() {
    // Same lazy materialize-once pattern as getFields().
    if (null == myMethods) {
      Collection<PsiMethod> generatedMethods = methodSupplier.get();
      myMethods = generatedMethods.toArray(PsiMethod.EMPTY_ARRAY);
      methodSupplier = Collections::emptyList;
    }
    return myMethods;
  }
  @Override
  public @NotNull List<PsiField> getOwnFields() {
    return Collections.emptyList();
  }
  @Override
  public @NotNull List<PsiMethod> getOwnMethods() {
    return Collections.emptyList();
  }
  @Override
  public @NotNull List<PsiClass> getOwnInnerClasses() {
    return Collections.emptyList();
  }
  public LombokLightClassBuilder withFieldSupplier(final Supplier<? extends Collection<PsiField>> fieldSupplier) {
    this.fieldSupplier = fieldSupplier;
    return this;
  }
  public LombokLightClassBuilder withMethodSupplier(final Supplier<? extends Collection<PsiMethod>> methodSupplier) {
    this.methodSupplier = methodSupplier;
    return this;
  }
  public LombokLightClassBuilder withEnum(boolean isEnum) {
    myIsEnum = isEnum;
    return this;
  }
  public LombokLightClassBuilder withImplicitModifier(@PsiModifier.ModifierConstant @NotNull @NonNls String modifier) {
    myModifierList.addImplicitModifierProperty(modifier);
    return this;
  }
  public LombokLightClassBuilder withModifier(@PsiModifier.ModifierConstant @NotNull @NonNls String modifier) {
    myModifierList.addModifier(modifier);
    return this;
  }
  public LombokLightClassBuilder withContainingClass(@NotNull PsiClass containingClass) {
    setContainingClass(containingClass);
    return this;
  }
  public LombokLightClassBuilder withNavigationElement(PsiElement navigationElement) {
    setNavigationElement(navigationElement);
    return this;
  }
  public LombokLightClassBuilder withExtends(PsiClassType baseClassType) {
    getExtendsList().addReference(baseClassType);
    return this;
  }
  public LombokLightClassBuilder withParameterTypes(@Nullable PsiTypeParameterList parameterList) {
    if (parameterList != null) {
      Stream.of(parameterList.getTypeParameters()).forEach(this::withParameterType);
    }
    return this;
  }
  public LombokLightClassBuilder withParameterType(@NotNull PsiTypeParameter psiTypeParameter) {
    getTypeParameterList().addParameter(psiTypeParameter);
    return this;
  }
  // Equality is by qualified name only, ignoring members and modifiers.
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    LombokLightClassBuilder that = (LombokLightClassBuilder)o;
    return myQualifiedName.equals(that.myQualifiedName);
  }
  @Override
  public int hashCode() {
    return myQualifiedName.hashCode();
  }
}
|
tenomoto/ncl
|
ni/src/ncl/ccmhdr.h
|
#ifndef CCMHDR_H
#define CCMHDR_H
/*
 * One row of the CCM field-name table: a field's standard name and unit
 * as written on the history tape, a human-readable long name, and the
 * equivalent udunits string (NULL when the quantity is dimensionless).
 */
typedef struct ccm_names {
char* std_name;
char* std_unit;
char* long_name;
char* udunit;
}CCMNAMES;
CCMNAMES ccm_name_tab[] = {
{"PHIS", "M2/S2", "surface geopotential", "m2/s2"},
{"PS", "PA", "surface pressure", "Pa"},
{"T", "K", "temperature", "K"},
{"U", "M/S", "zonal wind component", "m/s"},
{"V", "M/S", "meridional wind component", "m/s"},
{"ETADOT", "1/S", "vertical (eta) velocity", "s-1"},
{"Q", "KG/KG", "specific humidity", "kg/kg"},
{"HA01", "KG/KGS", "horizontal advective tendency of water vapor", "kg/kg/s"},
{"VA01", "KG/KGS", "vertical advective tendency of water vapor", "kg/kg/s"},
{"DF01", "KG/KGS", "SLT tendency of water vapor", "kg/kg/s"},
{"TA01", "KG/KGS", "total advective tendency of water vapor", "kg/kg/s"},
{"VD01", "KG/KGS", "vertical diffusion tendency of water vapor", "kg/kg/s"},
{"DC01", "KG/KGS", "convective adjustment tendency of water vapor", "kg/kg/s"},
{"TE01", "KG/KGS", "total tendency of water vapor", "kg/kg/s"},
{"SS01", "KG/KGS", "source/sink tendency of water vapor", "kg/kg/s"},
{"SF01", "KG/M2/S", "surface flux of water vapor", "kg/m2/s"},
{"DUH", "K/S", "U horizontal diffusive heating", "K/s"},
{"DVH", "K/S", "V horizontal diffusive heating", "K/s"},
{"DTH", "K/S", "T horizontal diffusive heating", "K/s"},
{"NDQH", "KG/KGS", "Q horizontal diffusion", "kg/kg/s"},
{"ORO", "FLAG", "ocean (0), land (1), sea ice (2) flag", NULL},
{"WET", "M", "Soil moisture", "m"},
{"SNOWH", "M", "Water equivalent snow depth", "m"},
{"PRECL", "M/S", "Large-scale (stable) precipitation rate", "m/s"},
{"PRECC", "M/S", "Convective precipitation rate", "m/s"},
{"PRECSL", "M/S", "Large-scale (stable) snow rate (water equivalent)","m/s"},
{"PRECSC", "M/S", "Convective snow rate (water equivalent)", "m/s"},
{"SHFLX", "W/M2", "Surface sensible heat flux", "W/m2"},
{"LHFLX", "W/M2", "Surface latent heat flux", "W/m2"},
{"QFLX", "KG/M2/S", "Surface water flux", "kg/m2/s"},
{"PBLH", "M", "PBL height", "m"},
{"USTAR", "M/S", "Surface friction velocity", "m/s"},
{"TREFHT", "K", "Reference height temperature", "K"},
{"Q10", "KG/KG", "10-meter specific humidity", "kg/kg"},
{"CGH", "K/M", "Counter-gradient term for heat in PBL", "K/m"},
{"CGQ", "1/M", "Counter-gradient term for moisture in PBL", "m-1"},
{"CGS", "S/M2", "Counter-gradient coefficient on surface kinematic fluxes","s/m2"},
{"TPERT", "K", "Perturbation temperature (eddies in PBL)", "K"},
{"QPERT", "KG/KG", "Perturbation specific humidity (eddies in PBL)", "kg/kg"},
{"KVH", "M2/S", "Vertical diffusion diffusivities (heat/moisture)","m2/s"},
{"KVM", "M2/S", "Vertical diffusion diffusivities (momentum)", "m2/s"},
{"DUV", "M/S2", "U vertical diffusion", "m/s2"},
{"DVV", "M/S2", "V vertical diffusion", "m/s2"},
{"DTV", "K/S", "T vertical diffusion", "K/s"},
{"FSNS", "W/M2", "Net solar flux at surface", "W/m2"},
{"FLNS", "W/M2", "Net longwave flux at surface", "W/m2"},
{"FLNT", "W/M2", "Net longwave flux at top", "W/m2"},
{"FSNT", "W/M2", "Net solar flux at top", "W/m2"},
{"CLOUD", "FRACTION", "Cloud fraction", NULL},
{"SETLWP", "G/M2", "Specified liquid water path lengths", "gram/m2"},
{"CLDLWP","G/M2","Actual cloud liquid water path length (account for cloud fraction)","gram/m2"},
{"EFFCLD", "FRACTION", "Effective cloud fraction", NULL},
{"FLNTC", "W/M2", "Clearsky net longwave flux at top", "W/m2"},
{"FSNTC", "W/M2", "Clearsky net solar flux at top", "W/m2"},
{"FLNSC", "W/M2", "Clearsky net longwave flux at surface", "W/m2"},
{"FSNSC", "W/M2", "Clearsky net solar flux at surface", "W/m2"},
{"OMEGA", "PA/S", "Vertical pressure velocity", "Pa/s"},
{"DQP", "KG/KGS", "Specific humidity tendency due to precipitation","kg/kg/s"},
{"TAUX", "N/M2", "X-component (east-west) of surface stress", "N/m2"},
{"TAUY", "N/M2", "Y-component (north-south) of surface stress", "N/m2"},
{"SRFRAD", "W/M2", "Net radiative flux at surface", "W/m2"},
{"QRS", "K/S", "Solar heating rate", "K/s"},
{"QRL", "K/S", "Longwave heating rate", "K/s"},
{"CLDTOT", "FRACTION", "Vertically-integrated, random overlap, total cloud amount", NULL},
{"CLDLOW", "FRACTION", "Vertically-integrated, random overlap, low cloud amount", NULL},
{"CLDMED", "FRACTION", "Vertically-integrated, random overlap, mid-level cloud amount",NULL},
{"CLDHGH", "FRACTION", "Vertically-integrated, random overlap, high cloud amount", NULL},
{"TOTLWP", "FRACTION", "Vertically-integrated actual liquid water path length", NULL},
{"LWSH", "M", "Liquid water scale height", "m"},
{"TS1", "K", "Surface temperature (level 1)", "K"},
{"TS2", "K", "Subsurface temperature (level 2)", "K"},
{"TS3", "K", "Subsurface temperature (level 3)", "K"},
{"TS4", "K", "Subsurface temperature (level 4)", "K"},
{"TS", "K", "Surface temperature", "K"},
{"SOLIN", "W/M2", "Solar insolation", "W/m2"},
{"UTEND", "M/S2", "U tendency", "m/s2"},
{"VTEND", "M/S2", "V tendency", "m/s2"},
{"TTEND", "K/S", "T tendency", "K/s"},
{"LPSTEN", "PA/S", "Surface pressure tendency", "Pa/s"},
{"UTGW", "M/S2", "U tendency - gravity wave drag", "m/s2"},
{"VTGW", "M/S2", "V tendency - gravity wave drag", "m/s2"},
{"TAUGWX", "N/M2", "East-west gravity wave drag surface stress", "N/m2"},
{"TAUGWY", "N/M2", "North-south gravity wave drag surface stress", "N/m2"},
{"DTCOND", "K/S", "T tendency - convective adjustment", "K/s"},
{"CMFDT", "K/S", "T tendency - moist convetion", "K/s"},
{"CMFDQ", "KG/KGS", "Q tendency - moist convection", "kg/kg/s"},
{"CMFDQF", "KG/KGS", "Q tendency - moist convective rainout", "kg/kg/s"},
{"CMFMC", "KG/M2S", "Moist convection mass flux", "kg/m2/s"},
{"CMFSL", "W/M2", "Moist convection liquid water static energy flux","W/m2"},
{"CMFLQ", "W/M2", "Moist convection total water flux", "W/m2"},
{"CNVCLD", "FRACTION", "Random overlap total convective cloud amount", NULL},
{"VT", "KM/S", "Meridional heat transport", "K m/s"},
{"VZ", "M3/S3", "Meridional transport", "m3/s3"},
{"VQ", "M/S", "Meridional water transport", "m/s"},
{"VVPUU", "M2/S2", "Kinetic Energy", "m2/s2"},
{"ALB", "FRACTION", "Albedo", NULL},
{"ALBCLR", "FRACTION", "Clear sky albedo", NULL},
{"RELHUM", "FRACTION", "Relative humidity", NULL},
{"Z2", "M", "Geopotential Height (above sea level)", "m"},
{"MQ", "KG/M2", "water mass", "kg/m2"},
{"PSL", "PASCALS", "Sea level pressure", "Pa"},
{"OMEGAUP", "PA/S", "Average of Omega when omega is < 0 (up)", "Pa/s"},
{"NUMOMGUP", "FRACTION", "Percentage of time omega is < 0 (up)", NULL},
{"CLOUDUP", "FRACTION", "Average of Cloud when omega is < 0 (up)", NULL},
{"DPSLON", "M/S2", "Longitude Pressure Gradient", "m/s2"},
{"DPSLAT", "M/S2", "Latitude Pressure Gradient", "m/s2"},
{NULL,NULL,NULL,NULL}
};
#endif
|
kevinzhwl/ObjectARXMod
|
2004/samples/com/AsdkSquareWrapper_dg/Square/squarent.h
|
// (C) Copyright 1999 by Autodesk, Inc.
//
// Permission to use, copy, modify, and distribute this software in
// object code form for any purpose and without fee is hereby granted,
// provided that the above copyright notice appears in all copies and
// that both that copyright notice and the limited warranty and
// restricted rights notice below appear in all supporting
// documentation.
//
// AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
// AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
// MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC.
// DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
// UNINTERRUPTED OR ERROR FREE.
//
// Use, duplication, or disclosure by the U.S. Government is subject to
// restrictions set forth in FAR 52.227-19 (Commercial Computer
// Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii)
// (Rights in Technical Data and Computer Software), as applicable.
//
#include "dbents.h"
#include "acgi.h"
// sin(45 degrees) = sqrt(2)/2; relates the orientation vector's length
// (half the diagonal) to the square's side length.
#define SIN45 0.7071067811865
// Parenthesized so the expansion stays correct inside larger expressions
// (e.g. 1.0/HALFPI would otherwise expand to 1.0/3.14.../2.0).
#define HALFPI (3.14159265358979323846/2.0)
// Custom AcDbCurve-derived entity representing a square.  The square is
// stored as a center point, an orientation vector (its length encodes the
// size, see squareSideLength below), a plane normal, and an integer id.
class AsdkSquare : public AcDbCurve
{
public:
ACRX_DECLARE_MEMBERS(AsdkSquare);
AsdkSquare();
virtual ~AsdkSquare();
// AcDbEntity overrides
//
virtual Acad::ErrorStatus getClassID(CLSID* pClsid) const;
virtual Adesk::Boolean worldDraw(AcGiWorldDraw* wd);
virtual Acad::ErrorStatus getOsnapPoints(AcDb::OsnapMode osnapMode,
int gsSelectionMark,
const AcGePoint3d& pickPoint,
const AcGePoint3d& lastPoint,
const AcGeMatrix3d& viewXform,
AcGePoint3dArray& snapPoints,
AcDbIntArray& geomIds
) const;
virtual Acad::ErrorStatus getGripPoints(AcGePoint3dArray& gripPoints,
AcDbIntArray& osnapModes,
AcDbIntArray& geomIds) const;
virtual Acad::ErrorStatus moveGripPointsAt(const AcDbIntArray& indices,
const AcGeVector3d& offset);
// DWG/DXF round-trip persistence.
virtual Acad::ErrorStatus dwgInFields( AcDbDwgFiler* filer );
virtual Acad::ErrorStatus dwgOutFields( AcDbDwgFiler* filer ) const;
virtual Acad::ErrorStatus dxfInFields( AcDbDxfFiler* filer );
virtual Acad::ErrorStatus dxfOutFields( AcDbDxfFiler* filer ) const;
virtual void list() const;
virtual Acad::ErrorStatus transformBy(const AcGeMatrix3d& xform);
// A square always lies in a single plane.
virtual Adesk::Boolean isPlanar() const
{
assertReadEnabled();
return Adesk::kTrue;
}
// Reports the plane through mCenter with normal mNormal.
virtual Acad::ErrorStatus getPlane
(AcGePlane& plane, AcDb::Planarity& type) const
{
assertReadEnabled();
plane.set(mCenter, mNormal);
type = AcDb::kPlanar;
return Acad::eOk;
}
// Accessors for the defining data; implementations are inline below.
void squareCenter(AcGePoint3d& p) const;
void setSquareCenter(const AcGePoint3d& p );
void squareOrient(AcGeVector3d& p) const;
void setSquareOrient(const AcGeVector3d& p );
void squareNormal(AcGeVector3d& d) const;
void setSquareNormal(const AcGeVector3d& d);
void squareSideLength(double& l) const;
void setSquareSideLength(const double l);
void squareId(int& i) const;
void setSquareId(const int i);
private:
// Our square is defined by its center, direction vector (where it
// also gets its size), and a normal.
AcGePoint3d mCenter;
AcGeVector3d mOrient;
AcGeVector3d mNormal;
int mId;
};
// Returns the square's center point via the out-parameter p.
inline void AsdkSquare::squareCenter(AcGePoint3d& p) const
{ assertReadEnabled();
p = mCenter;} // Square center
// Sets the square's center point.
inline void AsdkSquare::setSquareCenter( const AcGePoint3d& p )
{ assertWriteEnabled();
mCenter = p; } // Square center
// Returns the orientation vector via the out-parameter p.
inline void AsdkSquare::squareOrient(AcGeVector3d& p) const
{assertReadEnabled();
p = mOrient;} // Square Orientation in 3dspace
// Sets the orientation vector (its length also determines the square's size).
inline void AsdkSquare::setSquareOrient( const AcGeVector3d& p )
{ assertWriteEnabled();
mOrient = p; } // Square Orientation in 3dspace
// Returns the plane normal via the out-parameter d.
inline void AsdkSquare::squareNormal(AcGeVector3d& d) const
{assertReadEnabled();
d = mNormal;} // Square Normal
// Sets the plane normal.
inline void AsdkSquare::setSquareNormal( const AcGeVector3d& d )
{ assertWriteEnabled();
mNormal = d; } // Square Normal
// Derives the side length from the orientation vector: mOrient spans half
// the square's diagonal, so side = 2 * |mOrient| * sin(45 deg).
inline void AsdkSquare::squareSideLength(double& l) const
{assertReadEnabled();
l = (mOrient.length() * SIN45) * 2.0; } // Square side length
// Sets the side length by rescaling the orientation vector while keeping its
// direction: the vector must span half the diagonal, i.e. (l/2)/sin(45 deg).
inline void AsdkSquare::setSquareSideLength(const double l)
{ assertWriteEnabled();
// Hoist the common scale factor instead of recomputing it per component.
const double halfDiagonal = (l / 2.0) / SIN45;
mOrient = mOrient.normalize();
mOrient.x = mOrient.x * halfDiagonal;
mOrient.y = mOrient.y * halfDiagonal;
mOrient.z = mOrient.z * halfDiagonal; }
// Returns the square's integer id via the out-parameter i.
inline void AsdkSquare::squareId(int& i) const
{assertReadEnabled();
i = mId; } // Square id
// Sets the square's integer id.
inline void AsdkSquare::setSquareId(const int i)
{ assertWriteEnabled();
mId = i;}
|
miraDask/Marathon
|
Client/marathon-client/src/pages/projects/index.js
|
import React, { useState, useEffect, useContext, useCallback } from 'react';
import { getCookie } from '../../utils/cookie';
import { ProjectsContext } from '../../providers/projects-context.provider';
import { getProjects } from '../../services/projects.service';
import NoProjects from '../../components/no-projects';
import ProjectsAll from '../../components/all-projects';
import Spinner from '../../components/spinner';
const UserProjectsPage = () => {
const [ projects, setProjects ] = useState([]);
const [ isLoading, setLoading ] = useState(true);
const { updatedProjects } = useContext(ProjectsContext);
const getAllProjects = useCallback(async () => {
const token = getCookie('x-auth-token');
const projectsAll = await getProjects(token);
setProjects(projectsAll);
setLoading(false);
}, []);
useEffect(
() => {
getAllProjects();
},
[ getAllProjects, updatedProjects ]
);
if (isLoading) {
return <Spinner color="green-400" />;
}
return projects.length === 0 ? <NoProjects /> : <ProjectsAll projects={projects} />;
};
export default UserProjectsPage;
|
barreiro/activemq-artemis
|
artemis-core-client/src/main/java/org/apache/activemq/artemis/api/core/Message.java
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.api.core;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import org.apache.activemq.artemis.core.persistence.Persister;
/**
* A Message is a routable instance that has a payload.
* <p>
* The payload (the "body") is opaque to the messaging system. A Message also has a fixed set of
* headers (required by the messaging system) and properties (defined by the users) that can be used
* by the messaging system to route the message (e.g. to ensure it matches a queue filter).
* <h2>Message Properties</h2>
* <p>
* Message can contain properties specified by the users. It is possible to convert from some types
* to other types as specified by the following table:
* <pre>
* | | boolean byte short int long float double String byte[]
* |----------------------------------------------------------------
* |boolean | X X
* |byte | X X X X X
* |short | X X X X
* |int | X X X
* |long | X X
* |float | X X X
* |double | X X
* |String | X X X X X X X X
* |byte[] | X
* |-----------------------------------------------------------------
* </pre>
* <p>
* If conversion is not allowed (for example calling {@code getFloatProperty} on a property set a
* {@code boolean}), a {@link ActiveMQPropertyConversionException} will be thrown.
*
*
* User cases that will be covered by Message
*
* Receiving a buffer:
*
* Message encode = new CoreMessage(); // or any other implementation
* encode.receiveBuffer(buffer);
*
*
* Sending to a buffer:
*
* Message encode;
* size = encode.getEncodeSize();
* encode.encodeDirectly(bufferOutput);
*
*/
public interface Message {
// This is an estimate of how much memory a Message takes up, excluding body and properties
// Note, it is only an estimate, it's not possible to be entirely sure with Java
// This figure is calculated using the test utilities in org.apache.activemq.tests.unit.util.sizeof
// The value is somewhat higher on 64 bit architectures, probably due to different alignment
int memoryOffset = 352;
SimpleString HDR_ROUTE_TO_IDS = new SimpleString("_AMQ_ROUTE_TO");
SimpleString HDR_SCALEDOWN_TO_IDS = new SimpleString("_AMQ_SCALEDOWN_TO");
SimpleString HDR_ROUTE_TO_ACK_IDS = new SimpleString("_AMQ_ACK_ROUTE_TO");
// used by the bridges to set duplicates
SimpleString HDR_BRIDGE_DUPLICATE_ID = new SimpleString("_AMQ_BRIDGE_DUP");
/**
 * The actual time at which the message expired.
 */
SimpleString HDR_ACTUAL_EXPIRY_TIME = new SimpleString("_AMQ_ACTUAL_EXPIRY");
/**
* The original address of a message when a message is transferred through DLQ or expiry
*/
SimpleString HDR_ORIGINAL_ADDRESS = new SimpleString("_AMQ_ORIG_ADDRESS");
/**
* The original address of a message when a message is transferred through DLQ or expiry
*/
SimpleString HDR_ORIGINAL_QUEUE = new SimpleString("_AMQ_ORIG_QUEUE");
/**
 * The original message ID before the message was transferred.
 */
SimpleString HDR_ORIG_MESSAGE_ID = new SimpleString("_AMQ_ORIG_MESSAGE_ID");
/**
* For the Message Grouping feature.
*/
SimpleString HDR_GROUP_ID = new SimpleString("_AMQ_GROUP_ID");
/**
* to determine if the Large Message was compressed.
*/
SimpleString HDR_LARGE_COMPRESSED = new SimpleString("_AMQ_LARGE_COMPRESSED");
/**
* The body size of a large message before it was compressed.
*/
SimpleString HDR_LARGE_BODY_SIZE = new SimpleString("_AMQ_LARGE_SIZE");
/**
* To be used with Scheduled Delivery.
*/
SimpleString HDR_SCHEDULED_DELIVERY_TIME = new SimpleString("_AMQ_SCHED_DELIVERY");
/**
* To be used with duplicate detection.
*/
SimpleString HDR_DUPLICATE_DETECTION_ID = new SimpleString("_AMQ_DUPL_ID");
/**
* To be used with Last value queues.
*/
SimpleString HDR_LAST_VALUE_NAME = new SimpleString("_AMQ_LVQ_NAME");
/**
* To define the mime-type of body messages. Mainly for stomp but it could be informed on any message for user purposes.
*/
SimpleString HDR_CONTENT_TYPE = new SimpleString("_AMQ_CONTENT_TYPE");
/**
* The name of the validated user who sent the message. Useful for auditing.
*/
SimpleString HDR_VALIDATED_USER = new SimpleString("_AMQ_VALIDATED_USER");
/**
* The Routing Type for this message. Ensures that this message is only routed to queues with matching routing type.
*/
SimpleString HDR_ROUTING_TYPE = new SimpleString("_AMQ_ROUTING_TYPE");
byte DEFAULT_TYPE = 0;
byte OBJECT_TYPE = 2;
byte TEXT_TYPE = 3;
byte BYTES_TYPE = 4;
byte MAP_TYPE = 5;
byte STREAM_TYPE = 6;
/** The message will contain another message persisted through {@link org.apache.activemq.artemis.spi.core.protocol.EmbedMessageUtil}*/
byte EMBEDDED_TYPE = 7;
default void cleanupInternalProperties() {
// only on core
}
default RoutingType getRoutingType() {
return null;
}
default Message setRoutingType(RoutingType routingType) {
return this;
}
default SimpleString getLastValueProperty() {
return null;
}
/**
* @deprecated do not use this, use through ICoreMessage or ClientMessage
*/
@Deprecated
default InputStream getBodyInputStream() {
return null;
}
/**
* @deprecated do not use this, use through ICoreMessage or ClientMessage
*/
@Deprecated
default ActiveMQBuffer getBodyBuffer() {
return null;
}
/**
* @deprecated do not use this, use through ICoreMessage or ClientMessage
*/
@Deprecated
default byte getType() {
return (byte)0;
}
/**
* @deprecated do not use this, use through ICoreMessage or ClientMessage
*/
@Deprecated
default Message setType(byte type) {
return this;
}
void messageChanged();
/** Used to calculate what is the delivery time.
* Return null if not scheduled. */
Long getScheduledDeliveryTime();
default Message setScheduledDeliveryTime(Long time) {
return this;
}
/** Context can be used by the application server to inject extra control, like a protocol specific on the server.
 * There is only one per Object, use it wisely!
 *
 * Note: the intent of this was to replace the PageStore reference on Message, but it may later be extended by adding a ServerPojo
 * */
RefCountMessageListener getContext();
default SimpleString getGroupID() {
return null;
}
SimpleString getReplyTo();
Message setReplyTo(SimpleString address);
Message setContext(RefCountMessageListener context);
/** The buffer will belong to this message, until release is called. */
Message setBuffer(ByteBuf buffer);
ByteBuf getBuffer();
/** It will generate a new instance of the message encode, being a deep copy, new properties, new everything */
Message copy();
/** It will generate a new instance of the message encode, being a deep copy, new properties, new everything */
Message copy(long newID);
default boolean acceptsConsumer(long uniqueConsumerID) {
return true;
}
default void rejectConsumer(long uniqueConsumerID) {
}
/**
* Returns the messageID.
* <br>
* The messageID is set when the message is handled by the server.
*/
long getMessageID();
// used for NO-LOCAL: mainly for AMQP
default Message setConnectionID(String connectionID) {
return this;
}
default String getConnectionID() {
return null;
}
Message setMessageID(long id);
default boolean isLargeMessage() {
return false;
}
/**
* Returns the expiration time of this message.
*/
long getExpiration();
/**
* Sets the expiration of this message.
*
* @param expiration expiration time
*/
Message setExpiration(long expiration);
/**
* Returns whether this message is expired or not.
*/
/**
 * Returns whether this message has passed its expiration time.
 * A message with no expiration set ({@code getExpiration() == 0}) never expires.
 */
default boolean isExpired() {
   final long expiration = getExpiration();
   return expiration != 0 && System.currentTimeMillis() >= expiration;
}
/**
*
* This represents historically the JMSMessageID.
* We had in the past used this for the MessageID that was sent on core messages...
*
* later on when we added AMQP this name clashed with AMQPMessage.getUserID();
*
* @return the user id
*/
Object getUserID();
Message setUserID(Object userID);
default String getValidatedUserID() {
return null;
}
default Message setValidatedUserID(String validatedUserID) {
return this;
}
/**
* Returns whether this message is durable or not.
*/
boolean isDurable();
/**
* Sets whether this message is durable or not.
*
* @param durable {@code true} to flag this message as durable, {@code false} else
*/
Message setDurable(boolean durable);
Persister<Message> getPersister();
String getAddress();
Message setAddress(String address);
SimpleString getAddressSimpleString();
Message setAddress(SimpleString address);
long getTimestamp();
Message setTimestamp(long timestamp);
/**
* Returns the message priority.
* <p>
* Values range from 0 (less priority) to 9 (more priority) inclusive.
*/
byte getPriority();
/**
* Sets the message priority.
* <p>
* Value must be between 0 and 9 inclusive.
*
* @param priority the new message priority
*/
Message setPriority(byte priority);
/** Used to receive this message from an encoded medium buffer */
void receiveBuffer(ByteBuf buffer);
/** Used to send this message to an encoded medium buffer.
* @param buffer the buffer used.
* @param deliveryCount Some protocols (AMQP) will have this as part of the message. */
void sendBuffer(ByteBuf buffer, int deliveryCount);
int getPersistSize();
void persist(ActiveMQBuffer targetRecord);
void reloadPersistence(ActiveMQBuffer record);
/**
 * Releases the backing Netty buffer (if any) and clears the reference,
 * so the message no longer owns pooled memory.
 */
default void releaseBuffer() {
ByteBuf buffer = getBuffer();
if (buffer != null) {
buffer.release();
}
setBuffer(null);
}
default void reencode() {
// only valid probably on AMQP
}
/**
 * Records provenance annotations (original queue, address and message ID) when a
 * message is moved, e.g. to a dead-letter or expiry queue, and resets its expiry.
 *
 * @param original      the message being diverted
 * @param originalQueue the queue the message came from; used only if the original
 *                      message carries no HDR_ORIGINAL_QUEUE annotation of its own
 */
default void referenceOriginalMessage(final Message original, String originalQueue) {
// Prefer the annotation already on the message so the very first origin is kept
// across multiple hops.
String queueOnMessage = original.getAnnotationString(Message.HDR_ORIGINAL_QUEUE);
if (queueOnMessage != null) {
setAnnotation(Message.HDR_ORIGINAL_QUEUE, queueOnMessage);
} else if (originalQueue != null) {
setAnnotation(Message.HDR_ORIGINAL_QUEUE, originalQueue);
}
// Same idea for address/id: copy the first-hop values if present, otherwise
// take them from the message itself.
Object originalID = original.getAnnotation(Message.HDR_ORIG_MESSAGE_ID);
if (originalID != null) {
setAnnotation(Message.HDR_ORIGINAL_ADDRESS, original.getAnnotationString(Message.HDR_ORIGINAL_ADDRESS));
setAnnotation(Message.HDR_ORIG_MESSAGE_ID, originalID);
} else {
setAnnotation(Message.HDR_ORIGINAL_ADDRESS, original.getAddress());
setAnnotation(Message.HDR_ORIG_MESSAGE_ID, original.getMessageID());
}
// reset expiry
setExpiration(0);
}
/**
* it will translate a property named HDR_DUPLICATE_DETECTION_ID.
* @return
*/
/**
 * Translates the HDR_DUPLICATE_DETECTION_ID property into raw bytes.
 *
 * @return the duplicate-detection id as a byte array, or {@code null} when unset
 */
default byte[] getDuplicateIDBytes() {
   final Object duplicateID = getDuplicateProperty();
   if (duplicateID == null) {
      return null;
   }
   if (duplicateID instanceof SimpleString) {
      return ((SimpleString) duplicateID).getData();
   }
   if (duplicateID instanceof String) {
      return new SimpleString(duplicateID.toString()).getData();
   }
   return (byte[]) duplicateID;
}
default org.apache.activemq.artemis.api.core.Message putExtraBytesProperty(SimpleString key, byte[] value) {
return putBytesProperty(key, value);
}
default byte[] getExtraBytesProperty(SimpleString key) throws ActiveMQPropertyConversionException {
return getBytesProperty(key);
}
default byte[] removeExtraBytesProperty(SimpleString key) throws ActiveMQPropertyConversionException {
return (byte[])removeProperty(key);
}
default Object getDuplicateProperty() {
return null;
}
Message putBooleanProperty(String key, boolean value);
Message putByteProperty(String key, byte value);
Message putBytesProperty(String key, byte[] value);
Message putShortProperty(String key, short value);
Message putCharProperty(String key, char value);
Message putIntProperty(String key, int value);
Message putLongProperty(String key, long value);
Message putFloatProperty(String key, float value);
Message putDoubleProperty(String key, double value);
Message putBooleanProperty(SimpleString key, boolean value);
Message putByteProperty(SimpleString key, byte value);
Message putBytesProperty(SimpleString key, byte[] value);
Message putShortProperty(SimpleString key, short value);
Message putCharProperty(SimpleString key, char value);
Message putIntProperty(SimpleString key, int value);
Message putLongProperty(SimpleString key, long value);
Message putFloatProperty(SimpleString key, float value);
Message putDoubleProperty(SimpleString key, double value);
/**
* Puts a String property in this message.
*
* @param key property name
* @param value property value
*/
Message putStringProperty(String key, String value);
Message putObjectProperty(String key, Object value) throws ActiveMQPropertyConversionException;
Message putObjectProperty(SimpleString key, Object value) throws ActiveMQPropertyConversionException;
Object removeProperty(String key);
boolean containsProperty(String key);
Boolean getBooleanProperty(String key) throws ActiveMQPropertyConversionException;
Byte getByteProperty(String key) throws ActiveMQPropertyConversionException;
Double getDoubleProperty(String key) throws ActiveMQPropertyConversionException;
Integer getIntProperty(String key) throws ActiveMQPropertyConversionException;
Long getLongProperty(String key) throws ActiveMQPropertyConversionException;
Object getObjectProperty(String key);
Short getShortProperty(String key) throws ActiveMQPropertyConversionException;
Float getFloatProperty(String key) throws ActiveMQPropertyConversionException;
String getStringProperty(String key) throws ActiveMQPropertyConversionException;
SimpleString getSimpleStringProperty(String key) throws ActiveMQPropertyConversionException;
byte[] getBytesProperty(String key) throws ActiveMQPropertyConversionException;
Object removeProperty(SimpleString key);
boolean containsProperty(SimpleString key);
Boolean getBooleanProperty(SimpleString key) throws ActiveMQPropertyConversionException;
Byte getByteProperty(SimpleString key) throws ActiveMQPropertyConversionException;
Double getDoubleProperty(SimpleString key) throws ActiveMQPropertyConversionException;
Integer getIntProperty(SimpleString key) throws ActiveMQPropertyConversionException;
Long getLongProperty(SimpleString key) throws ActiveMQPropertyConversionException;
Object getObjectProperty(SimpleString key);
default Object removeAnnotation(SimpleString key) {
return removeProperty(key);
}
default String getAnnotationString(SimpleString key) {
Object value = getAnnotation(key);
if (value != null) {
return value.toString();
} else {
return null;
}
}
Object getAnnotation(SimpleString key);
/** Callers must call {@link #reencode()} in order to be sent to clients */
default Message setAnnotation(SimpleString key, Object value) {
putObjectProperty(key, value);
return this;
}
Short getShortProperty(SimpleString key) throws ActiveMQPropertyConversionException;
Float getFloatProperty(SimpleString key) throws ActiveMQPropertyConversionException;
String getStringProperty(SimpleString key) throws ActiveMQPropertyConversionException;
SimpleString getSimpleStringProperty(SimpleString key) throws ActiveMQPropertyConversionException;
byte[] getBytesProperty(SimpleString key) throws ActiveMQPropertyConversionException;
Message putStringProperty(SimpleString key, SimpleString value);
/**
* Returns the size of the <em>encoded</em> message.
*/
int getEncodeSize();
/**
* Returns all the names of the properties for this message.
*/
Set<SimpleString> getPropertyNames();
int getRefCount();
int incrementRefCount() throws Exception;
int decrementRefCount() throws Exception;
int incrementDurableRefCount();
int decrementDurableRefCount();
/**
* @return Returns the message in Map form, useful when encoding to JSON
*/
/**
 * @return the message headers and properties in Map form, useful when encoding to JSON
 */
default Map<String, Object> toMap() {
   // Use the generic type instead of a raw Map to avoid unchecked warnings.
   final Map<String, Object> map = toPropertyMap();
   map.put("messageID", getMessageID());
   final Object userID = getUserID();
   // Reuse the value read above rather than calling getUserID() a second time.
   if (userID != null) {
      map.put("userID", "ID:" + userID);
   }
   map.put("address", getAddress() == null ? "" : getAddress());
   map.put("durable", isDurable());
   map.put("expiration", getExpiration());
   map.put("timestamp", getTimestamp());
   map.put("priority", (int) getPriority());
   return map;
}
/**
* @return Returns the message properties in Map form, useful when encoding to JSON
*/
/**
 * @return the user-defined message properties in Map form, useful when encoding to JSON
 */
default Map<String, Object> toPropertyMap() {
   // Use the generic type instead of a raw Map to avoid unchecked warnings.
   final Map<String, Object> map = new HashMap<>();
   for (SimpleString name : getPropertyNames()) {
      // Convert the key once and reuse it for both the map key and the lookup.
      final String key = name.toString();
      map.put(key, getObjectProperty(key));
   }
   return map;
}
/** This should make you convert your message into Core format. */
ICoreMessage toCore();
int getMemoryEstimate();
}
|
scottfrazer/mongock-core
|
mongock-driver/mongock-driver-api/src/main/java/com/github/cloudyrock/mongock/driver/api/driver/ConnectionDriver.java
|
<filename>mongock-driver/mongock-driver-api/src/main/java/com/github/cloudyrock/mongock/driver/api/driver/ConnectionDriver.java
package com.github.cloudyrock.mongock.driver.api.driver;
import com.github.cloudyrock.mongock.driver.api.common.Validable;
import com.github.cloudyrock.mongock.driver.api.entry.ChangeEntry;
import com.github.cloudyrock.mongock.driver.api.entry.ChangeEntryService;
import com.github.cloudyrock.mongock.driver.api.lock.LockManager;
import java.util.Set;
/**
 * Abstraction over the datastore connection used by Mongock: exposes the lock
 * manager, the change-entry service, driver dependencies and the configuration
 * setters/getters (lock timing, retries, repository names, index creation).
 *
 * @param <CHANGE_ENTRY> concrete change-entry type handled by this driver
 */
public interface ConnectionDriver<CHANGE_ENTRY extends ChangeEntry> extends Validable {
// Lifecycle: prepare the driver's underlying resources before use.
void initialize();
LockManager getLockManager();
LockManager getAndAcquireLockManager();
ChangeEntryService<CHANGE_ENTRY> getChangeEntryService();
Set<ChangeSetDependency> getDependencies();
ForbiddenParametersMap getForbiddenParameters();
Class getLegacyMigrationChangeLogClass(boolean runAlways);
// Configuration setters (lock timing, retry count, repository names, indexing).
void setLockAcquiredForMinutes(long lockAcquiredForMinutes);
void setMaxWaitingForLockMinutes(long maxWaitingForLockMinutes);
void setMaxTries(int maxTries);
void setChangeLogRepositoryName(String changeLogRepositoryName);
void setLockRepositoryName(String lockRepositoryName);
void setIndexCreation(boolean indexCreation);
boolean isInitialized();
// Corresponding configuration getters.
long getLockAcquiredForMinutes();
long getMaxWaitingForLockMinutes();
int getMaxTries();
String getChangeLogRepositoryName();
String getLockRepositoryName();
boolean isIndexCreation();
}
|
hehonghui/mmat
|
mmat/src/main/java/org/eclipse/mat/parser/internal/snapshot/RetainedSizeCache.java
|
<reponame>hehonghui/mmat
/*******************************************************************************
* Copyright (c) 2008 SAP AG.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* SAP AG - initial API and implementation
*******************************************************************************/
package org.eclipse.mat.parser.internal.snapshot;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.NoSuchElementException;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.eclipse.mat.collect.HashMapIntLong;
import org.eclipse.mat.parser.internal.Messages;
import org.eclipse.mat.parser.model.XSnapshotInfo;
/**
 * Persistent id-to-retained-size cache backed by a binary index file
 * ({@code <prefix>i2sv2.index}).  Values are kept in memory in a
 * {@link HashMapIntLong} and written back on {@link #close()} when dirty.
 * A legacy {@code i2s.index} file (old sign-encoded format) is read
 * transparently when the new file does not exist.
 */
public class RetainedSizeCache {
private String filename;
private HashMapIntLong id2size;
private boolean isDirty = false;

public RetainedSizeCache(XSnapshotInfo snapshotInfo) {
this.filename = snapshotInfo.getPrefix() + "i2sv2.index"; //$NON-NLS-1$
readId2Size(snapshotInfo.getPrefix());
}

/**
 * Returns the cached retained size for the given object id, or 0 if unknown.
 */
public long get(int key) {
try {
return id2size.get(key);
} catch (NoSuchElementException e) {
// $JL-EXC$
return 0;
}
}

/**
 * Stores a retained size and marks the cache dirty so it is persisted on close.
 */
public void put(int key, long value) {
id2size.put(key, value);
isDirty = true;
}

/**
 * Persists the cache to disk if it changed.  Write errors are logged and
 * ignored; in that case the cache stays dirty so a later close may retry.
 */
public void close() {
if (!isDirty)
return;
File file = new File(filename);
// try-with-resources guarantees the stream is closed even when a write
// fails (the previous implementation leaked the stream on error).
try (DataOutputStream out = new DataOutputStream(new FileOutputStream(file))) {
for (int key : id2size.getAllKeys()) {
out.writeInt(key);
out.writeLong(id2size.get(key));
}
} catch (IOException e) {
Logger.getLogger(RetainedSizeCache.class.getName()).log(Level.WARNING,
Messages.RetainedSizeCache_Warning_IgnoreError, e);
return;
}
// Only clear the dirty flag once everything (including close) succeeded.
isDirty = false;
}

/**
 * Reads the index file into {@link #id2size}.  On any I/O error the partially
 * read data is discarded and the (possibly corrupt) file is deleted.
 *
 * @param readOldFormat true when reading the legacy file, whose sizes were
 *                      encoded as negative offsets from Long.MIN_VALUE
 */
private void doRead(File file, boolean readOldFormat) {
boolean delete = false;
// Pre-size the map: each entry is one int key plus one long value (12 bytes),
// so length/8 is a generous capacity estimate (kept from the original code).
id2size = new HashMapIntLong((int) file.length() / 8);
try (DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(file)))) {
while (in.available() > 0) {
int key = in.readInt();
long value = in.readLong();
if (value < 0 && readOldFormat)
value = -(value - (Long.MIN_VALUE + 1));
id2size.put(key, value);
}
} catch (IOException e) {
Logger.getLogger(RetainedSizeCache.class.getName()).log(Level.WARNING,
Messages.RetainedSizeCache_ErrorReadingRetainedSizes, e);
// might have read corrupt data
id2size.clear();
delete = true;
}
if (delete) {
try {
file.delete();
} catch (RuntimeException ignore) {
// $JL-EXC$
}
}
}

/**
 * Locates and reads the index: prefers the v2 file, falls back to the legacy
 * file, and starts with an empty map when neither exists.
 */
private void readId2Size(String prefix) {
File file = new File(filename);
if (file.exists()) {
doRead(file, false);
} else {
File legacyFile = new File(prefix + "i2s.index");//$NON-NLS-1$
if (legacyFile.exists()) {
doRead(legacyFile, true);
} else {
id2size = new HashMapIntLong();
}
}
}
}
|
duc110789/vnpay
|
src/apis/api-user.auth.js
|
import BaseRepository from '../repositories/BaseRepository';
// One shared repository instance for all auth endpoints.
const request = new BaseRepository();

/**
 * Authentication-related API calls. Each method returns the promise
 * produced by the underlying repository request.
 */
const authApi = {
  // Sign in using OAuth client credentials taken from the global app config.
  login({ email, password }) {
    const { clientId, clientSecret } = window.app;
    const payload = {
      email,
      password,
      client_id: clientId,
      client_secret: clientSecret,
    };
    return request.post('api/v1/signin', payload);
  },

  // Request a password-reset email for the given address.
  forgotPassword(email) {
    return request.post('api/v1/forgot-password', { email });
  },

  // Check that a password-reset token is still valid.
  verifyToken(token) {
    return request.get('api/v1/reset-password', { token });
  },

  // Submit the new password together with the reset token.
  resetPassword(data) {
    return request.post('api/v1/reset-password', data);
  },

  // Fetch the profile of the currently authenticated user.
  myProfile() {
    return request.get('api/v1/my-profile');
  },
};

export default authApi;
|
orenmazor/moto
|
moto/elasticbeanstalk/models.py
|
import weakref
from boto3 import Session
from moto.core import BaseBackend, BaseModel, ACCOUNT_ID
from .exceptions import InvalidParameterValueError, ResourceNotFoundException
from .utils import make_arn
class FakeEnvironment(BaseModel):
    """In-memory stand-in for an Elastic Beanstalk environment."""

    def __init__(
        self, application, environment_name, solution_stack_name, tags,
    ):
        # Hold the parent application weakly to break the circular
        # application <-> environment dependency.
        self.application = weakref.proxy(application)
        self.environment_name = environment_name
        self.solution_stack_name = solution_stack_name
        self.tags = tags

    @property
    def application_name(self):
        return self.application.application_name

    @property
    def region(self):
        # The environment lives in whatever region its application does.
        return self.application.region

    @property
    def environment_arn(self):
        resource_path = "/".join([self.application_name, self.environment_name])
        return make_arn(self.region, ACCOUNT_ID, "environment", resource_path)

    @property
    def platform_arn(self):
        return "TODO"  # TODO
class FakeApplication(BaseModel):
    """In-memory stand-in for an Elastic Beanstalk application."""

    def __init__(self, backend, application_name):
        # weakref breaks the backend <-> application reference cycle
        self.backend = weakref.proxy(backend)
        self.application_name = application_name
        self.environments = dict()

    def create_environment(
        self, environment_name, solution_stack_name, tags,
    ):
        """Create a new environment; environment names are unique per app."""
        if environment_name in self.environments:
            raise InvalidParameterValueError
        environment = FakeEnvironment(
            application=self,
            environment_name=environment_name,
            solution_stack_name=solution_stack_name,
            tags=tags,
        )
        self.environments[environment_name] = environment
        return environment

    @property
    def region(self):
        return self.backend.region

    @property
    def arn(self):
        return make_arn(self.region, ACCOUNT_ID, "application", self.application_name)
class EBBackend(BaseBackend):
    """Per-region moto backend for the Elastic Beanstalk API."""

    def __init__(self, region):
        self.region = region
        self.applications = dict()

    def reset(self):
        # preserve region across the full state wipe
        region = self.region
        self._reset_model_refs()
        self.__dict__ = {}
        self.__init__(region)

    def create_application(self, application_name):
        """Register a new application; names must be region-unique."""
        if application_name in self.applications:
            raise InvalidParameterValueError(
                "Application {} already exists.".format(application_name)
            )
        new_app = FakeApplication(backend=self, application_name=application_name,)
        self.applications[application_name] = new_app
        return new_app

    def create_environment(self, app, environment_name, stack_name, tags):
        """Delegate environment creation to the owning application."""
        return app.create_environment(
            environment_name=environment_name,
            solution_stack_name=stack_name,
            tags=tags,
        )

    def describe_environments(self):
        """Return every environment across all applications."""
        return [
            env
            for app in self.applications.values()
            for env in app.environments.values()
        ]

    def list_available_solution_stacks(self):
        # Implemented in response.py
        pass

    def update_tags_for_resource(self, resource_arn, tags_to_add, tags_to_remove):
        """Add and remove tags on the environment identified by ARN."""
        try:
            res = self._find_environment_by_arn(resource_arn)
        except KeyError:
            raise ResourceNotFoundException(
                "Resource not found for ARN '{}'.".format(resource_arn)
            )
        for key, value in tags_to_add.items():
            res.tags[key] = value
        for key in tags_to_remove:
            del res.tags[key]

    def list_tags_for_resource(self, resource_arn):
        """Return the tag dict of the environment identified by ARN."""
        try:
            res = self._find_environment_by_arn(resource_arn)
        except KeyError:
            raise ResourceNotFoundException(
                "Resource not found for ARN '{}'.".format(resource_arn)
            )
        return res.tags

    def _find_environment_by_arn(self, arn):
        # Linear scan over all environments; raises KeyError when absent.
        for application in self.applications.values():
            for env in application.environments.values():
                if env.environment_arn == arn:
                    return env
        raise KeyError()
# One backend per region, covering the standard ("aws"), GovCloud and China
# partitions. get_available_regions defaults to partition_name="aws".
eb_backends = {}
for partition in ("aws", "aws-us-gov", "aws-cn"):
    for region in Session().get_available_regions(
        "elasticbeanstalk", partition_name=partition
    ):
        eb_backends[region] = EBBackend(region)
|
ubmagh/ayoub_maghdaoui-JEE
|
TP2/HospitalApp -Associations/src/main/java/me/ubmagh/hospital/SpringDataAppApplication.java
|
<filename>TP2/HospitalApp -Associations/src/main/java/me/ubmagh/hospital/SpringDataAppApplication.java
package me.ubmagh.hospital;
import me.ubmagh.hospital.entities.*;
import me.ubmagh.hospital.repositories.ConsultationRepository;
import me.ubmagh.hospital.repositories.MedecinRepository;
import me.ubmagh.hospital.repositories.PatientRepository;
import me.ubmagh.hospital.repositories.RendezVousRepository;
import me.ubmagh.hospital.services.IHospitalService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import java.util.Date;
import java.util.Random;
import java.util.stream.Stream;
@SpringBootApplication
public class SpringDataAppApplication {

    @Autowired
    private PatientRepository patientRepository;

    public static void main(String[] args) {
        SpringApplication.run(me.ubmagh.hospital.SpringDataAppApplication.class, args);
    }

    /**
     * Seed runner executed at startup: inserts demo patients and doctors,
     * then creates one appointment (RendezVous) and one consultation for it.
     * Repositories in the commented-out parameter list are injected through
     * the service instead.
     */
    @Bean
    CommandLineRunner start(
            /*
             PatientRepository patientRepository,
             MedecinRepository medecinRepository,
             RendezVousRepository rendezVousRepository,
             ConsultationRepository consultationRepository
            */
            MedecinRepository medecinRepository,
            IHospitalService hospitalService
    ){ // DI
        return args -> {
            // One patient saved directly through the repository ...
            patientRepository.save( new Patient( null, "ayoub", "Maghdaoui", new Date(), 100, false, null));
            // ... and three more saved through the service layer.
            Stream.of( "ahmed", "Najat", "Hassan").forEach(s -> {
                Patient p = new Patient();
                p.setFname(s);
                p.setLname(s);
                p.setScore(20);
                p.setSick(false);
                p.setBirthDate(new Date());
                // patientRepository.save(p);
                hospitalService.savePatient(p);
            });
            Random r = new Random();
            // Doctors get a randomly chosen speciality out of four options.
            Stream.of( "Ayman", "hannan", "Mohammed").forEach( s -> {
                Medecin m = new Medecin();
                m.setNom(s);
                m.setEmail(s+"<EMAIL>");
                m.setSpecialite( Stream.of("cardio", "dentiste", "generale", "optics").skip(r.nextInt(4)).findFirst().get() );
                // medecinRepository.save(m);
                hospitalService.saveMedecin(m);
            });
            // Look up seeded rows to wire an appointment together.
            Patient patient = patientRepository.findByFname("ayoub");
            Patient patient1 = patientRepository.findById(1L).orElse(null);
            Medecin medecin = medecinRepository.findByNom("Ayman");
            RendezVous rendezVous = new RendezVous();
            rendezVous.setDate( new Date());
            rendezVous.setMedecin( medecin );
            rendezVous.setPatient(patient);
            rendezVous.setStatus( StatusRDV.PENDING );
            hospitalService.saveRdv( rendezVous);
            // A consultation is attached to the appointment just created.
            Consultation consultation = new Consultation();
            consultation.setDateConsultation( new Date());
            consultation.setRendezVous(rendezVous);
            consultation.setRapport(" Rapport de consultation ..................;");
            hospitalService.saveConsultation(consultation);
        };
    }
}
/*
// playing with patient entity
@SpringBootApplication
public class SpringDataAppApplication implements CommandLineRunner {
@Autowired
private PatientRepository patientRepository;
public static void main(String[] args) {
SpringApplication.run(SpringDataAppApplication.class, args);
}
@Override
public void run(String... args) throws Exception { // called automatically after main()
System.out.println("===> Insertion des Patients ...");
patientRepository.save( new Patient( null, "ayoub", "Ahmed", new Date(), 10, false, null));
patientRepository.save( new Patient( null, "ibrahim", "Ali", new Date(), 10, true, null));
patientRepository.save( new Patient( null, "mahmoud", "Ali", new Date(), 10, true, null));
List<Patient> patients = patientRepository.findAll();
System.out.println("======================= \n ==> Liste des Patients =======================");
patients.forEach( patient -> {
System.out.println("----------------------");
System.out.println(" -> prenom : "+patient.getFname());
System.out.println(" -> nom : "+patient.getLname());
System.out.println(" -> date de naissance : "+patient.getBirthDate());
System.out.println(" -> score : "+patient.getScore());
System.out.println(" -> malade : "+patient.isSick() );
});
System.out.println("======================= \n ==> patient ayant l'id 2 =======================");
Patient patient = patientRepository.findById(2L).orElse(null);
if( patient==null )
System.out.println("-> patient introuvable");
else {
System.out.println("-> patient trouvé : ");
System.out.println(" -> prenom : " + patient.getFname());
System.out.println(" -> nom : " + patient.getLname());
System.out.println(" -> date de naissance : " + patient.getBirthDate());
System.out.println(" -> score : " + patient.getScore());
System.out.println(" -> malade : " + patient.isSick());
System.out.println("======================= \n ==> modifier le score de patient à 921 =======================");
patient.setScore(921);
patientRepository.save(patient);
System.out.println("-> vérifier la base de données, la valeur est modifiée avec succès ");
}
System.out.println("======================= \n ==> supprimer le patient ayant l'id 3 =======================");
patientRepository.deleteById(3L);
System.out.println("-> vérifier la base de données, la patient supprimé avec succès ");
System.out.println("======================= \n ==> Remplissage de la BD avec 101 patients ... =======================");
for( int o =0; o<101; o++)
patientRepository.save( new Patient( null, "ayoub"+o, "Ahmed"+o, new Date(), (int) (Math.random()), true, null));
System.out.println("======================= \n ==> paginer les patients =======================");
Page<Patient> page = patientRepository.findAll(PageRequest.of(0, 3));
System.out.println("--> contenu de la page 1 : ");
for ( Patient pati: page) {
System.out.println(" -> prenom : " + pati.getFname() +" -> nom : " + pati.getLname());
}
page = patientRepository.findAll(PageRequest.of(2, 3));
System.out.println("--> contenu de la page 2 : ");
for ( Patient pati: page.getContent()) {
System.out.println(" -> prenom : " + pati.getFname() +" -> nom : " + pati.getLname());
}
System.out.println("nombre de pages: "+page.getTotalPages());
System.out.println(" nombre des elements: "+page.getTotalElements());
System.out.println(" page courante : "+page.getNumber());
System.out.print("\n\n ======================= \n ==> trouver les patients ayant malade=false =======================\n result : \n");
patients = patientRepository.findBySick(true);
for ( Patient pati: patients ) {
System.out.println(" "+pati.getId()+" -> prenom : " + pati.getFname() +" -> nom : " + pati.getLname());
}
System.out.print("\n\n ======================= \n ==> trouver les patients ayant nom=Ali OU prenom='' =======================\n result : \n");
patients = patientRepository.findByFnameOrLname("", "Ali");
for ( Patient pati: patients ) {
System.out.println(" "+pati.getId()+" -> prenom : " + pati.getFname() +" -> nom : " + pati.getLname());
}
System.out.print("\n\n ======================= \n ==> trouver les patients ayant le score < 10 =======================\n result : \n");
patients = patientRepository.findByScoreLessThan(10);
for ( Patient pati: patients ) {
System.out.println(" "+pati.getId()+" -> prenom : " + pati.getFname() +" -> nom : " + pati.getLname());
}
System.out.print("\n\n ======================= \n ==> les patients ayant un prenom qui contient 'a' page 1 --> pageable (page:1, size:3) =======================\n result : \n");
page = patientRepository.findByFnameContains("a", PageRequest.of(1, 3));
for ( Patient pati: page.getContent() ) {
System.out.println(" "+pati.getId()+" -> fname : " + pati.getFname() +" -> lname : " + pati.getLname());
}
System.out.println(" nombre de pages : "+page.getNumber()+" | nombre total des elements : "+page.getTotalElements());
String today = LocalDate.now().format(DateTimeFormatter.ISO_DATE);
String yesterDay = (LocalDate.now().minusDays(1)).format(DateTimeFormatter.ISO_DATE);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
System.out.print("\n\n ======================= \n ==> Trouver les patient par la fonction findByBirthDateBetweenAndBoAndScoreGreaterThanAndFnameContains --> d1=yesterday, d2= today, b=true, scoreMin=100, fnameSearchable='a' =======================\n result : \n");
patients = patientRepository.findByBirthDateBetweenAndSickAndScoreGreaterThanAndFnameContains( sdf.parse(yesterDay), sdf.parse(today), true, 100, "a" );
for ( Patient pati: patients ) {
System.out.println(" "+pati.getId()+" -> prenom : " + pati.getFname() +" -> nom : " + pati.getLname());
}
System.out.print("\n ==> Trouver les patients par la fonction JustFindWhatIWant --> d1=yesterday, d2= today, b=true, scoreMin=100, fnameSearchable='a' =======================\n result : \n");
patients = patientRepository.JustFindWhatIWant( sdf.parse(yesterDay), sdf.parse(today), true, 100, "%a%" );
for ( Patient pati: patients ) {
System.out.println(" "+pati.getId()+" -> prenom : " + pati.getFname() +" -> nom : " + pati.getLname());
}
System.out.println("*****************");
}
}
*/
|
akarnokd/akarnokd-misc
|
src/test/java/hu/akarnokd/rxjava3/UndeliverableTest.java
|
package hu.akarnokd.rxjava3;
import java.io.IOException;
import org.junit.Test;
import io.reactivex.rxjava3.plugins.RxJavaPlugins;
import io.reactivex.rxjava3.processors.PublishProcessor;
import io.reactivex.rxjava3.subscribers.TestSubscriber;
/**
 * Demonstrates how an inner-source error becomes an undeliverable error:
 * switchMapDelayError holds the error back, and once the consumer cancels
 * there is nobody left to deliver it to, so it reaches the global handler.
 */
public class UndeliverableTest {

    @Test
    public void test() {
        // Route undeliverable errors to stdout instead of crashing the test.
        RxJavaPlugins.setErrorHandler(error -> System.out.println(error));

        PublishProcessor<Integer> main = PublishProcessor.create();
        PublishProcessor<Integer> inner = PublishProcessor.create();

        // switchMapDelayError will delay all errors
        TestSubscriber<Integer> ts = main.switchMapDelayError(v -> inner).test();

        main.onNext(1);

        // the inner fails
        inner.onError(new IOException());

        // the consumer is still clueless
        ts.assertEmpty();

        // the consumer cancels
        ts.cancel();
    }
}
|
lechium/iOS1351Headers
|
usr/libexec/identityservicesd/IDSGroupServer.h
|
//
// Generated by classdumpios 1.0.1 (64 bit) (iOS port by DreamDevLost)(Debug version compiled Sep 26 2020 13:48:20).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import <objc/NSObject.h>
#import "IDSGroupServer-Protocol.h"
@class NSString;
@protocol FTMessageDeliveryProtocol, IDSGroupServerPushHandler;
// Class-dump generated declaration. Based on the visible selectors it wraps
// publish/query operations against a group server keyed by an "ENID",
// delivering messages through FTMessageDeliveryProtocol — internals are not
// visible here, so treat these notes as assumptions to confirm.
@interface IDSGroupServer : NSObject <IDSGroupServer>
{
    id <FTMessageDeliveryProtocol> _messageDelivery;	// 8 = 0x8
    id <IDSGroupServerPushHandler> _pushHandler;	// 16 = 0x10
}

- (void).cxx_destruct;	// IMP=0x00000001002408b4
@property(retain, nonatomic) id <IDSGroupServerPushHandler> pushHandler; // @synthesize pushHandler=_pushHandler;
@property(retain, nonatomic) id <FTMessageDeliveryProtocol> messageDelivery; // @synthesize messageDelivery=_messageDelivery;
- (void)_queryGroupServerENID:(id)arg1 withPreviousEntries:(id)arg2 completion:(CDUnknownBlockType)arg3;	// IMP=0x0000000100240340
- (void)queryGroupServerForENID:(id)arg1 withCompletion:(CDUnknownBlockType)arg2;	// IMP=0x0000000100240290
- (void)publishGroupForKey:(id)arg1 data:(id)arg2 signature:(id)arg3 forwardingSig:(id)arg4 ENID:(id)arg5 version:(id)arg6 completion:(CDUnknownBlockType)arg7;	// IMP=0x000000010023fc54
- (CDUnknownBlockType)_putMessageCompletionWithCompletion:(CDUnknownBlockType)arg1;	// IMP=0x000000010023f7c4
- (CDUnknownBlockType)_getMessageCompletionCurrentEntries:(id)arg1 withCompletion:(CDUnknownBlockType)arg2;	// IMP=0x000000010023f404
- (id)initWithMessageDelivery:(id)arg1;	// IMP=0x000000010023f2e0
- (id)init;	// IMP=0x000000010023f228

// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;
@end
|
lihongli528628/yishu
|
medtree/medtree/Groups/Common/LoadingTableView.h
|
<reponame>lihongli528628/yishu<gh_stars>1-10
//
// LoadingTableView.h
// medtree
//
// Created by sam on 9/23/14.
// Copyright (c) 2014 sam. All rights reserved.
//
#import "BaseTableView.h"
@class LoadingView;
// Table view subclass that carries two LoadingView ivars — presumably a
// pull-to-refresh header and a load-more footer (TODO confirm against the
// implementation file; nothing here uses them directly).
@interface LoadingTableView : BaseTableView {
    LoadingView *_headerView;
    LoadingView *_footerView;
}

@end
|
mcvine/mcvine
|
packages/mccomponents/mccomponentsbpmodule/wrap_AbstractScatteringKernel.cc
|
// -*- C++ -*-
//
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//
// <NAME>
// California Institute of Technology
// (C) 2005 All Rights Reserved
//
// {LicenseText}
//
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//
#include <sstream>
#include <boost/python.hpp>
#include "mccomponents/homogeneous_scatterer/AbstractScatteringKernel.h"
namespace wrap_mccomponents {

  // Registers the abstract base class AbstractScatteringKernel with
  // Boost.Python. no_init + noncopyable: the class can only be obtained
  // from C++ factories, never constructed or copied from Python.
  void wrap_AbstractScatteringKernel()
  {
    using namespace boost::python;
    using namespace mccomponents;

    class_<AbstractScatteringKernel, boost::noncopyable>
      ("AbstractScatteringKernel", no_init)
      // Expose the four virtual methods of the scattering-kernel interface.
      .def("scatter", &AbstractScatteringKernel::scatter)
      .def("absorb", &AbstractScatteringKernel::absorb)
      .def("scattering_coefficient", &AbstractScatteringKernel::scattering_coefficient)
      .def("absorption_coefficient", &AbstractScatteringKernel::absorption_coefficient)
      ;
  }

}
// version
// $Id$
// End of file
|
gtfierro/cs262-project
|
coordinator/server.go
|
<reponame>gtfierro/cs262-project
package main
import (
"fmt"
log "github.com/Sirupsen/logrus"
"github.com/gtfierro/cs262-project/common"
"net"
"strings"
"sync"
"time"
)
// Server is the coordinator process: it accepts broker/client connections,
// tracks broker liveness, and (when leader) maintains routing state that
// replicas can rebuild from etcd.
type Server struct {
	addrString         string       // listen address in host:port form
	address            *net.TCPAddr // resolved form of addrString
	metadata           *common.MetadataStore
	fwdTable           *ForwardingTable
	brokerManager      BrokerManager
	leaderService      LeaderService
	etcdManager        EtcdManager
	heartbeatInterval  time.Duration // how often brokers are expected to check in
	messageBuffer      chan *MessageFromBroker
	brokerDeathChan    chan *common.UUID // broker declared dead
	brokerLiveChan     chan *common.UUID // broker (re)connected
	brokerReassignChan chan *BrokerReassignment
	waitGroup          sync.WaitGroup // tracks long-running goroutines for Shutdown
	stop               chan bool      // closed to signal global shutdown
	stopped            bool
}
// NewServer builds a coordinator from config: resolves the listen address,
// wires up metadata storage, the broker manager and the forwarding table,
// and selects real or dummy etcd/leader/IP-switcher implementations
// depending on configuration. Fatal-logs (and exits) on invalid config.
func NewServer(config *common.Config) *Server {
	var (
		err error
		s   = &Server{}
	)
	// "Global" binds all interfaces explicitly; otherwise only the port is
	// given (Go also listens on all interfaces then — presumably intentional).
	if config.Coordinator.Global {
		s.addrString = fmt.Sprintf("0.0.0.0:%d", config.Coordinator.Port)
	} else {
		s.addrString = fmt.Sprintf(":%d", config.Coordinator.Port)
	}
	// parse the config into an address
	s.address, err = net.ResolveTCPAddr("tcp", s.addrString)
	if err != nil {
		log.WithFields(log.Fields{
			"port": config.Coordinator.Port, "global": config.Coordinator.Global, "error": err.Error(),
		}).Fatal("Could not resolve the generated TCP address")
	}
	s.heartbeatInterval = time.Duration(config.Coordinator.HeartbeatInterval) * time.Second
	s.metadata = common.NewMetadataStore(config)
	// Buffered channels decouple broker event producers from the consumers.
	s.brokerDeathChan = make(chan *common.UUID, 10)
	s.brokerLiveChan = make(chan *common.UUID, 10)
	s.brokerReassignChan = make(chan *BrokerReassignment, 500)
	s.messageBuffer = make(chan *MessageFromBroker, 50)
	// On AWS the elastic IP follows the leader; otherwise a no-op switcher.
	var ipswitcher IPSwitcher
	if config.Coordinator.UseAWSIPSwitcher {
		ipswitcher, err = NewAWSIPSwitcher(config.Coordinator.InstanceId, config.Coordinator.Region, config.Coordinator.ElasticIP)
		if err != nil {
			log.WithFields(log.Fields{
				"error": err.Error(),
			}).Fatal("Could not create AWS IP Switcher")
		}
	} else {
		ipswitcher = &DummyIPSwitcher{}
	}
	// etcd gives replicated state + leader election; dummies run standalone.
	if config.Coordinator.UseEtcd {
		etcdConn := NewEtcdConnection(strings.Split(config.Coordinator.EtcdAddresses, ","))
		s.leaderService = NewLeaderService(etcdConn, 5*time.Second, ipswitcher)
		s.etcdManager = NewEtcdManager(etcdConn, s.leaderService, config.Coordinator.CoordinatorCount,
			config.Coordinator.GCFreq, 5*time.Second, 1000, config.Coordinator.EnableContinuousCheckpointing,
			config.Coordinator.DisableSendLog)
	} else {
		s.leaderService = &DummyLeaderService{}
		s.etcdManager = &DummyEtcdManager{}
	}
	s.brokerManager = NewBrokerManager(s.etcdManager, s.heartbeatInterval, s.brokerDeathChan,
		s.brokerLiveChan, s.messageBuffer, s.brokerReassignChan, new(common.RealClock))
	s.fwdTable = NewForwardingTable(s.metadata, s.brokerManager, s.etcdManager, s.brokerDeathChan, s.brokerLiveChan, s.brokerReassignChan)
	go s.fwdTable.monitorInboundChannels()
	s.stop = make(chan bool, 1)
	s.stopped = false
	return s
}
// Shutdown signals every goroutine to stop (by closing s.stop), cancels the
// etcd and leadership watches, and blocks until all tracked goroutines exit.
func (s *Server) Shutdown() {
	close(s.stop)
	s.etcdManager.CancelWatch()
	s.leaderService.CancelWatch()
	s.waitGroup.Wait()
	s.stopped = true
}
// Run starts the necessary goroutines and then listens; does not return.
// State is first rebuilt from the etcd log if needed, then the background
// loops are started, and finally the accept loop runs on this goroutine.
func (s *Server) Run() {
	logStartKey := s.rebuildIfNecessary()
	go s.handleLeadership()
	go s.handleBrokerMessages()
	go s.monitorLog(logStartKey)
	go s.monitorGeneralConnections()
	s.listenAndDispatch()
}
// Check the log: if there is more than one entry (the initial leader entry),
// then start the rebuild process.
// Returns the key at which the log should start being monitored.
func (s *Server) rebuildIfNecessary() string {
	s.metadata.DropDatabase() // Clear out DB; it's transient and only for running queries
	logBelowThreshold, logKey, logRev := s.etcdManager.GetLogStatus()
	// A non-trivial log means prior state exists: rebuild broker and
	// forwarding state as of that etcd revision.
	if !logBelowThreshold {
		s.brokerManager.RebuildFromEtcd(logRev)
		s.fwdTable.RebuildFromEtcd(logRev)
	}
	return logKey
}
// Doesn't return. Attempts to grab leadership once, then watches for
// leadership changes in the background while continuously renewing the
// leader lease on this goroutine.
func (s *Server) handleLeadership() {
	s.waitGroup.Add(1)
	defer s.waitGroup.Done()
	// Failure here is non-fatal: another coordinator may simply be leader.
	_, err := s.leaderService.AttemptToBecomeLeader()
	if err != nil {
		log.WithField("error", err).Error("Error while attempting to become the initial leader")
	}
	go s.leaderService.WatchForLeadershipChange()
	s.leaderService.MaintainLeaderLease()
}
// Won't return. While this coordinator is a non-leader replica, it tails the
// etcd log starting at startKey so it can mirror the leader's state; leaders
// block here until they lose leadership or the server stops.
func (s *Server) monitorLog(startKey string) {
	s.waitGroup.Add(1)
	defer s.waitGroup.Done()
	endKey := startKey
	for {
		// If we're a leader, just wait... nothing to be done here
		select {
		case <-s.stop:
		case <-s.leaderService.WaitForNonleadership():
		}
		if common.IsChanClosed(s.stop) {
			return
		}
		// WatchLog blocks while following the log and returns the key to
		// resume from on the next non-leader stint.
		endKey = s.etcdManager.WatchLog(endKey)
	}
}
// monitorGeneralConnections mirrors, on replicas, the general-connection
// events the leader writes to etcd: while not leader, it replays each
// received message through the normal dispatch path so replica state stays
// in sync. Does not return until the server stops.
func (s *Server) monitorGeneralConnections() {
	s.waitGroup.Add(1)
	defer s.waitGroup.Done()
	for {
		// If we're a leader, just wait... nothing to be done here
		select {
		case <-s.stop:
		case <-s.leaderService.WaitForNonleadership():
		}
		if common.IsChanClosed(s.stop) {
			return
		}
		commConn := NewReplicaCommConn(s.etcdManager, s.leaderService, GeneralSuffix, s.heartbeatInterval)
		// Close the replica connection as soon as we stop or become leader,
		// which unblocks the ReceiveMessage loop below.
		go func() {
			select {
			case <-s.stop:
			case <-s.leaderService.WaitForLeadership():
			}
			commConn.Close()
		}()
		// Now we're definitely not a leader, set up a watch for the leader's events
		for {
			msg, err := commConn.ReceiveMessage()
			if err == nil {
				go s.dispatch(NewSingleEventCommConn(commConn, msg), LogPrefix+"/"+GeneralSuffix)
			} else {
				break // continue outer loop since we're no longer leader
			}
		}
	}
}
// handleMessage routes a single message received from a broker into the
// forwarding table and acknowledges it back to the sending broker.
// Unrecognized message types are logged and dropped without an ack.
func (s *Server) handleMessage(brokerMessage *MessageFromBroker) {
	brokerID := brokerMessage.broker.BrokerID
	switch msg := brokerMessage.message.(type) {
	case *common.BrokerPublishMessage:
		// New/updated publication metadata from a broker.
		s.fwdTable.HandlePublish(msg.UUID, msg.Metadata, brokerID, nil)
		// Keyed literals (consistent with dispatch) so the code survives
		// field additions to AcknowledgeMessage and passes go vet.
		brokerMessage.broker.Send(&common.AcknowledgeMessage{MessageID: msg.MessageID})
	case *common.BrokerQueryMessage:
		// A client subscription forwarded by its broker.
		s.fwdTable.HandleSubscription(msg.Query, msg.UUID, brokerID, nil)
		brokerMessage.broker.Send(&common.AcknowledgeMessage{MessageID: msg.MessageID})
	case *common.PublisherTerminationMessage:
		s.fwdTable.HandlePublisherTermination(msg.PublisherID, brokerID)
		brokerMessage.broker.Send(&common.AcknowledgeMessage{MessageID: msg.MessageID})
	case *common.ClientTerminationMessage:
		s.fwdTable.HandleSubscriberTermination(msg.ClientID, brokerID)
		brokerMessage.broker.Send(&common.AcknowledgeMessage{MessageID: msg.MessageID})
	default:
		log.WithFields(log.Fields{
			"message": msg, "messageType": common.GetMessageType(msg), "brokerID": brokerID,
		}).Warn("Received unexpected message from a broker")
	}
}
// dispatch reads exactly one message from a new connection and routes it:
// broker handshakes go to the broker manager, publisher/client remap
// requests are answered in place, and anything else is logged and dropped.
// The connection is closed only on a decode error.
func (s *Server) dispatch(commConn CommConn, address string) {
	msg, err := commConn.ReceiveMessage()
	if err != nil {
		log.WithFields(log.Fields{
			"error": err, "tcpAddr": address,
		}).Error("Error decoding message from connection")
		commConn.Close()
		return
	}
	log.WithFields(log.Fields{
		"msg": msg, "messageType": common.GetMessageType(msg), "address": address,
	}).Info("Received a message")
	switch m := msg.(type) {
	case *common.BrokerConnectMessage:
		// A broker announcing itself: hand its connection to the manager
		// and acknowledge regardless of connect errors (which are logged).
		err = s.brokerManager.ConnectBroker(&m.BrokerInfo, commConn.GetBrokerConn(m.BrokerID))
		if err != nil {
			log.WithFields(log.Fields{
				"error": err, "brokerInfo": m.BrokerInfo, "tcpAddr": address,
			}).Error("Error while connecting to broker")
		}
		ack := &common.AcknowledgeMessage{MessageID: m.MessageID}
		commConn.Send(ack)
	case *common.BrokerRequestMessage:
		// A publisher/client asking which broker it should talk to.
		if resp, err := s.brokerManager.HandlePubClientRemapping(m); err == nil {
			commConn.Send(resp)
		} else {
			log.WithFields(log.Fields{
				"requestMessage": msg, "error": err,
			}).Error("Publisher/client requested remapping but failed")
		}
	default:
		log.WithFields(log.Fields{
			"tcpAddr": address, "message": msg, "messageType": common.GetMessageType(msg),
		}).Warn("Received unexpected message type over a new connection")
	}
}
// handleBrokerMessages drains the shared broker message buffer, handling
// each message on its own goroutine, until the server stops.
func (s *Server) handleBrokerMessages() {
	s.waitGroup.Add(1)
	defer s.waitGroup.Done()
	for {
		select {
		case <-s.stop:
			return
		case msg := <-s.messageBuffer:
			go s.handleMessage(msg)
		}
	}
}
// listenAndDispatch is the leader's accept loop. It waits until this
// coordinator holds leadership, opens the TCP listener, and dispatches each
// inbound connection; on losing leadership the listener is closed and the
// outer loop waits to become leader again. Returns only on shutdown.
func (s *Server) listenAndDispatch() {
	var (
		listener *net.TCPListener
		conn     *net.TCPConn
		err      error
	)
	s.waitGroup.Add(1)
	defer s.waitGroup.Done()
	log.WithFields(log.Fields{
		"address": s.address,
	}).Info("Coordinator listening for requests!")
	// loop on the TCP connection and hand new connections to the dispatcher
LeaderLoop:
	for {
		// Block until we are leader (or shutting down).
		waitChan := s.leaderService.WaitForLeadership()
		select {
		case <-waitChan:
		case <-s.stop:
		}
		if common.IsChanClosed(s.stop) {
			return
		}
		// listen on the address
		listener, err = net.ListenTCP("tcp", s.address)
		if err != nil {
			log.WithFields(log.Fields{
				"address": s.address, "error": err.Error(),
			}).Fatal("Could not listen on the provided address")
			return
		}
		// Close the listener when we stop or lose leadership; this breaks
		// AcceptTCP out of its blocking call below.
		go func() {
			waitChan := s.leaderService.WaitForNonleadership()
			select {
			case <-s.stop:
				listener.Close()
			case <-waitChan:
				listener.Close()
			}
		}()
	ListenLoop:
		for {
			conn, err = listener.AcceptTCP()
			if err != nil {
				if common.IsChanClosed(s.stop) {
					return
				} else {
					log.WithField("error", err.Error()).Error("Error accepting connection")
					continue ListenLoop
				}
			}
			// Double-check leadership: a connection may have been accepted
			// just as leadership was lost.
			if !s.leaderService.IsLeader() {
				log.WithField("address", conn.RemoteAddr()).Info("Rejecting inbound connection because leadership is not held")
				conn.Close() // Reject connections when not the leader
				listener.Close()
				continue LeaderLoop
			} else {
				log.WithField("address", conn.RemoteAddr()).Info("Accepting inbound connection on leader")
				commConn := NewLeaderCommConn(s.etcdManager, s.leaderService, GeneralSuffix, conn)
				go s.dispatch(commConn, fmt.Sprintf("%v", conn.RemoteAddr()))
			}
		}
	}
}
|
ttungl/Coding-Interview-Challenge
|
source-code/Unique Paths 62.py
|
# 62. Unique Paths
# <EMAIL>
# A robot is located at the top-left corner of a m x n grid (marked 'Start' in the diagram below).
# The robot can only move either down or right at any point in time. The robot is trying to reach the bottom-right corner of the grid (marked 'Finish' in the diagram below).
# How many possible unique paths are there?
class Solution(object):
    def uniquePaths(self, m, n):
        """
        Count the distinct monotone paths (moves only right or down) from
        the top-left to the bottom-right corner of an m x n grid.

        :type m: int  -- grid rows
        :type n: int  -- grid columns
        :rtype: int   -- number of unique paths (0 if either dimension is 0)

        Rolling 1-D DP: count[j] holds the number of paths reaching column j
        of the row currently being processed. Time O(m*n), space O(n).
        (The original file also contained a 2-D DP variant after the return
        statement; it was unreachable dead code and has been removed.)
        """
        if not m or not n:
            return 0
        count = [1] * n  # first row: exactly one path to every cell
        for _ in range(1, m):
            for j in range(1, n):
                # paths from above (old count[j]) + paths from the left
                count[j] += count[j - 1]
        return count[-1]
|
Vawx/weighttracker
|
spec/models/food_spec.rb
|
<filename>spec/models/food_spec.rb<gh_stars>0
require 'rails_helper'
RSpec.describe Food, type: :model do
  # Every nutritional column on Food is required.
  %i[
    name calories serving_size server_size_measure fat_grams
    carbs_grams protien_grams cholesterol_milli weight_grams sat_fat_grams
  ].each do |attribute|
    it { should validate_presence_of attribute }
  end
end
=begin
t.string "name"
t.integer "calories"
t.integer "serving_size"
t.string "server_size_measure"
t.integer "fat_grams"
t.integer "carbs_grams"
t.integer "protien_grams"
t.integer "cholesterol_milli"
t.float "weight_grams"
t.float "sat_fat_grams"
=end
|
nikola-naydenov-hmcts/div-petitioner-frontend
|
app/steps/respondent/home/address/index.test.js
|
<filename>app/steps/respondent/home/address/index.test.js<gh_stars>1-10
const request = require('supertest');
const { testContent, testRedirect } = require('test/util/assertions');
const { withSession } = require('test/util/setup');
const server = require('app');
const idamMock = require('test/mocks/idam');
const { removeStaleData } = require('app/core/helpers/staleDataManager');
const { expect } = require('test/util/chai');
const { clone } = require('lodash');
const modulePath = 'app/steps/respondent/home/address';
const content = require(`${modulePath}/content`);
let s = {};
let agent = {};
let underTest = {};
describe(modulePath, () => {
  // Fresh server + agent per test so session state cannot leak between them.
  beforeEach(() => {
    idamMock.stub();
    s = server.init();
    agent = request.agent(s.app);
    underTest = s.steps.RespondentHomeAddress;
  });
  afterEach(() => {
    idamMock.restore();
  });
  describe('success', () => {
    let session = {};
    beforeEach(done => {
      session = { divorceWho: 'wife' };
      withSession(done, agent, session);
    });
    it('renders the content from the content file', done => {
      testContent(done, agent, underTest, content, session);
    });
    it('redirects to the next page', done => {
      // A confirmed postcode-lookup address should advance to the
      // correspondence-address step.
      const context = {
        addressType: 'postcode',
        addressConfirmed: 'true',
        address: ['address', '1', 'ea1 eaf'],
        postcode: 'ea1 eaf',
        postcodeError: false
      };
      testRedirect(done, agent, underTest, context,
        s.steps.RespondentCorrespondenceUseHomeAddress);
    });
  });
  // Verifies removeStaleData's invalidation rules for the respondent's
  // home address when the "knows home address" answer changes.
  describe('Watched session values', () => {
    it('removes respondentHomeAddress if respondentKnowsHomeAddress is changed and respondentKnowsHomeAddress is no', () => {
      const previousSession = {
        respondentKnowsHomeAddress: 'Yes',
        respondentHomeAddress: ['Address 1', 'Address 2', 'Address 3'],
        livingArrangementsLiveTogether: 'No'
      };
      const session = clone(previousSession);
      session.respondentKnowsHomeAddress = 'No';
      const newSession = removeStaleData(previousSession, session);
      expect(typeof newSession.respondentHomeAddress).to.equal('undefined');
    });
    it('remove respondentHomeAddress if respondentKnowsHomeAddress is removed and not living together', () => {
      const previousSession = {
        respondentKnowsHomeAddress: 'Yes',
        respondentHomeAddress: ['Address 1', 'Address 2', 'Address 3'],
        livingArrangementsLiveTogether: 'No'
      };
      const session = clone(previousSession);
      delete session.respondentKnowsHomeAddress;
      const newSession = removeStaleData(previousSession, session);
      expect(typeof newSession.respondentHomeAddress).to.equal('undefined');
    });
    it('does not remove respondentHomeAddress if respondentKnowsHomeAddress is removed but are still living together', () => {
      // Living together implies the home address is still known, so the
      // stale-data manager must keep it.
      const respondentHomeAddress = ['Address 1', 'Address 2', 'Address 3'];
      const previousSession = {
        respondentKnowsHomeAddress: 'Yes',
        respondentHomeAddress,
        livingArrangementsLiveTogether: 'Yes'
      };
      const session = clone(previousSession);
      delete session.respondentKnowsHomeAddress;
      const newSession = removeStaleData(previousSession, session);
      expect(newSession.respondentHomeAddress).to.equal(respondentHomeAddress);
    });
  });
});
|
kestred/panda3d
|
panda/src/egg/eggUtilities.cxx
|
// Filename: eggUtilities.cxx
// Created by: drose (28Jan99)
//
////////////////////////////////////////////////////////////////////
//
// PANDA 3D SOFTWARE
// Copyright (c) Carnegie Mellon University. All rights reserved.
//
// All use of this software is subject to the terms of the revised BSD
// license. You should have received a copy of this license along
// with this source code in a file named "LICENSE."
//
////////////////////////////////////////////////////////////////////
#include "eggUtilities.h"
#include "eggPrimitive.h"
#include "eggGroupNode.h"
#include "pt_EggTexture.h"
#include "dcast.h"
////////////////////////////////////////////////////////////////////
// Function: get_textures_by_filename
// Description: Extracts from the egg subgraph beginning at the
// indicated node a set of all the texture objects
// referenced, grouped together by filename. Texture
// objects that share a common filename (but possibly
// differ in other properties) are returned together in
// the same element of the map.
////////////////////////////////////////////////////////////////////
void
get_textures_by_filename(const EggNode *node, EggTextureFilenames &result) {
if (node->is_of_type(EggPrimitive::get_class_type())) {
const EggPrimitive *prim = DCAST(EggPrimitive, node);
int num_textures = prim->get_num_textures();
for (int i = 0; i < num_textures; i++) {
PT_EggTexture tex = prim->get_texture(i);
result[tex->get_filename()].insert(tex);
}
} else if (node->is_of_type(EggGroupNode::get_class_type())) {
const EggGroupNode *group = DCAST(EggGroupNode, node);
EggGroupNode::const_iterator ci;
for (ci = group->begin(); ci != group->end(); ++ci) {
get_textures_by_filename(*ci, result);
}
}
}
|
rju/peething
|
architectureeditor/src/de/peerthing/systembehavioureditor/propertyeditor/forms/ActionForm.java
|
package de.peerthing.systembehavioureditor.propertyeditor.forms;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.*;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import de.peerthing.systembehavioureditor.PeerThingSystemBehaviourEditorPlugin;
import de.peerthing.systembehavioureditor.model.IParameter;
import de.peerthing.systembehavioureditor.model.editor.Action;
import de.peerthing.systembehavioureditor.model.editor.Parameter;
import de.peerthing.systembehavioureditor.propertyeditor.PropertyEditor;
import de.peerthing.systembehavioureditor.propertyeditor.plugindatahandler.PlugInAction;
import de.peerthing.systembehavioureditor.propertyeditor.plugindatahandler.PlugInParameter;
/**
* This class manages a form with which you can manipulate the data of an action
* in the propertyeditor
*
* @author Sebastian
* @Reviewer <NAME>
*/
public class ActionForm implements SelectionListener {
    /**
     * Label shown in front of the action-selection combo box.
     */
    Label chooseLabel;
    /**
     * Combo box from which the user chooses an action defined by the plugin.
     */
    Combo callCombo;
    /**
     * Label shown in front of the parameter combo box.
     */
    Label paraLabel;
    /**
     * Combo box from which the user chooses optional parameters for the action.
     */
    Combo paraCombo;
    /**
     * Label shown in front of the action description text.
     */
    Label descriptionLabel;
    /**
     * Read-only text field filled with the description of the selected action.
     */
    Text descriptionText;
    /**
     * The scrolled form that hosts all of the widgets above.
     */
    ScrolledForm form;
    /**
     * The action currently being edited (set in {@link #update}).
     */
    Action a1;
    /**
     * The property editor that owns this form; notified on every change.
     */
    PropertyEditor actionview;
    /**
     * Builds the GUI: an action combo, a description text and a parameter
     * combo, laid out in a two-column grid inside a scrolled form.
     *
     * @param container parent composite the form is created in
     */
    public ActionForm(Composite container) {
        FormToolkit toolkit = new FormToolkit(container.getDisplay());
        form = toolkit.createScrolledForm(container);
        form.getBody().setLayout(new GridLayout(2, false));
        form.setText("Choose a defined action: ");
        toolkit.paintBordersFor(form.getBody());
        GridData gd = new GridData(GridData.FILL_HORIZONTAL);
        chooseLabel = toolkit.createLabel(form.getBody(), "Choose action:",
                SWT.NONE);
        callCombo = new Combo(form.getBody(), SWT.NONE);
        callCombo.setLayoutData(gd);
        callCombo.addSelectionListener(this);
        gd = new GridData(GridData.FILL_BOTH);
        descriptionLabel = toolkit.createLabel(form.getBody(), "Description:",
                SWT.NONE);
        descriptionText = new Text(form.getBody(), SWT.MULTI | SWT.WRAP
                | SWT.V_SCROLL);
        descriptionText.setEditable(false);
        descriptionText.setLayoutData(gd);
        descriptionText.addSelectionListener(this);
        gd = new GridData(GridData.FILL_HORIZONTAL);
        paraLabel = toolkit.createLabel(form.getBody(), "Choose parameter:",
                SWT.NONE);
        paraCombo = new Combo(form.getBody(), SWT.NONE);
        paraCombo.setLayoutData(gd);
        paraCombo.addSelectionListener(this);
    }
    /**
     * Collects the names of all actions defined by the plugin, sorts them
     * alphabetically and inserts them into the action combo box.
     */
    public void callInit() {
        List<String> actions = new ArrayList<String>();
        for (PlugInAction pa : PeerThingSystemBehaviourEditorPlugin
                .getDefault().getPlugInDataHandler().getPluginActions()) {
            actions.add(pa.getName());
        }
        Collections.sort(actions);
        for (int i = 0; i < actions.size(); i++) {
            callCombo.add(actions.get(i));
        }
    }
    /**
     * Fills the parameter combo box with the non-required parameters of the
     * currently selected action (required parameters are added automatically
     * in {@link #updateAction()}). Does nothing if no action is selected.
     */
    public void paraInit() {
        if (callCombo.getSelectionIndex() >= 0) {
            for (PlugInParameter pp : PeerThingSystemBehaviourEditorPlugin
                    .getDefault().getPlugInDataHandler().getPluginAction(
                            callCombo.getItem(callCombo.getSelectionIndex()))
                    .getPluginparameter()) {
                if (!pp.isRequired())
                    paraCombo.add(pp.getName());
            }
        }
    }
    /**
     * Called every time the user selects an action in the editor; adjusts the
     * form to reflect the chosen object. If the action's name is not one of
     * the plugin-defined actions, the selection and description are cleared.
     *
     * @param object the selected object; must be an {@link Action}
     * @param actionview the property editor to report changes back to
     */
    public void update(Object object, PropertyEditor actionview) {
        this.actionview = actionview;
        a1 = (Action) object;
        if (callCombo.getItemCount() == 0) {
            callInit();
        }
        boolean validAction = false;
        for (int x = 0; x < callCombo.getItems().length; x++) {
            if (callCombo.getItem(x).equals(a1.getName())) {
                validAction = true;
                callCombo.select(x);
                descriptionText
                        .setText(PeerThingSystemBehaviourEditorPlugin
                                .getDefault().getPlugInDataHandler()
                                .getPluginAction(
                                        callCombo.getItem(callCombo
                                                .getSelectionIndex()))
                                .getDescription());
            }
        }
        if (!validAction) {
            callCombo.deselectAll();
            descriptionText.setText("");
        } else {
            if (paraCombo.getItemCount() == 0) {
                paraInit();
            }
        }
    }
    /**
     * Returns the form.
     *
     * @return the form
     */
    public ScrolledForm getForm() {
        return form;
    }
    /**
     * Handles user selections: choosing an action rebuilds the action's
     * parameter set; choosing a parameter adds it to the current action.
     *
     * @param event the selection event from one of the combo boxes
     */
    public void widgetSelected(SelectionEvent event) {
        if (event.getSource().equals(callCombo)) {
            updateAction();
        } else if (event.getSource().equals(paraCombo)) {
            Combo c = (Combo) event.getSource();
            Parameter tmp = new Parameter(c.getItem(c.getSelectionIndex()), a1);
            a1.addParameter(tmp);
            actionview.nodeNameChanged(a1);
            actionview.getTreeViewer().refresh();
        }
    }
    /**
     * Updates the action with the value currently selected in the call combo
     * box: resets its parameter map, re-adds all required parameters, updates
     * name and description, and refreshes the tree in the property editor.
     */
    private void updateAction() {
        Map<String, IParameter> paras = new Hashtable<String, IParameter>();
        a1.setParameters(paras);
        paraCombo.removeAll();
        paraInit();
        for (PlugInParameter pp : PeerThingSystemBehaviourEditorPlugin
                .getDefault().getPlugInDataHandler().getPluginAction(
                        callCombo.getItem(callCombo.getSelectionIndex()))
                .getPluginparameter()) {
            Parameter tmp = new Parameter(pp.getName(), a1);
            if (pp.isRequired())
                a1.addParameter(tmp);
        }
        a1.setName(callCombo.getItem(callCombo.getSelectionIndex()));
        descriptionText.setText(PeerThingSystemBehaviourEditorPlugin
                .getDefault().getPlugInDataHandler().getPluginAction(
                        callCombo.getItem(callCombo.getSelectionIndex()))
                .getDescription());
        actionview.nodeNameChanged(a1);
        actionview.getTreeViewer().refresh();
        actionview.getTreeViewer().expandToLevel(a1, 1);
    }
    // No default-selection behaviour is needed for these widgets.
    public void widgetDefaultSelected(SelectionEvent event) {
    }
}
|
orionzhou/biolib
|
old/PhasedHaplotypeParser.py
|
<filename>old/PhasedHaplotypeParser.py
'''
@author: <NAME>, University of Minnesota
'''
import os.path;
import re;
F_VARIANT = 1;
F_CLASS = 2;
F_POS = 3;
F_REF_ALLELE = 4;
F_VAR_ALLELE = 5;
F_EXON = 9;
F_ACC_OFFSET = 13;
class PhasedHaplotypeParser():
    """Parses a variant table into PHASE-style haplotype and marker files.

    ``haplotypes[0]`` holds the reference sequence; ``haplotypes[1..N]``
    hold one row per accession (sample). Nucleotides are recoded to the
    integer codes in ``self.nucleotides``.
    """

    def __init__(self, accessionN = 3, accessionColN = 7, delim = '\t'):
        # accessionN: number of accessions (samples) encoded in the table.
        # accessionColN: number of columns devoted to each accession.
        self.accessionN = accessionN
        self.accessionColN = accessionColN
        self.delim = delim
        # Markers collected as [name:exon, position] pairs.
        self.markers = []
        # One row per family id F000..F<N>; row 0 is the reference.
        self.haplotypes = []
        for k in range(self.accessionN + 1):
            famId = "F%03d" % k
            self.haplotypes.append([famId])
        # Integer codes used by the PHASE output format.
        self.nucleotides = {"A": 1, "C": 2, "G": 3, "T": 4}

    def parse(self, fPathIn, freqThreshold, fPathPhased = None, fPathMarker = None):
        """Reads the variant table and writes the phased/marker output files.

        Bug fix: the parameter was previously misspelled ``freqTheshold``
        while the body referenced ``freqThreshold``, so any call raised
        NameError as soon as the threshold was used.

        fPathIn       -- tab-delimited variant table (header + data rows)
        freqThreshold -- variant-allele frequency above which an accession
                         is assigned the variant allele instead of the ref
        fPathPhased   -- output haplotype file (default: fPathIn + ".haps")
        fPathMarker   -- output marker file (default: fPathIn + ".info")
        """
        print("Parsing...")
        if fPathPhased is None:
            fPathPhased = fPathIn + ".haps"
        if fPathMarker is None:
            fPathMarker = fPathIn + ".info"
        with open(fPathIn, 'r') as fIn:
            # Header row: pull each accession's name from its first column.
            hdr = fIn.readline().split(self.delim)
            self.haplotypes[0].append("REF")
            for k in range(self.accessionN):
                accNameIdx = F_ACC_OFFSET + k * self.accessionColN
                self.haplotypes[k + 1].append(hdr[accNameIdx])
            prevPos = 0
            for line in fIn:
                fields = line.split(self.delim)
                # Keep only exonic SNPs ("S") with a recognised ref allele.
                if fields[F_CLASS] == "S" and fields[F_EXON] != '' and fields[F_REF_ALLELE] in self.nucleotides:
                    # Skip duplicate rows for the same position.
                    if fields[F_POS] != prevPos:
                        self.markers.append([fields[F_VARIANT] + ":" + fields[F_EXON], fields[F_POS]])
                        self.haplotypes[0].append(self.nucleotides[fields[F_REF_ALLELE]])
                        for k in range(self.accessionN):
                            freqIdx = F_ACC_OFFSET + k * self.accessionColN + 3
                            if float(fields[freqIdx]) > freqThreshold:
                                # Variant frequency clears the cutoff: call it.
                                self.haplotypes[k + 1].append(self.nucleotides[fields[F_VAR_ALLELE].upper()])
                            else:
                                self.haplotypes[k + 1].append(self.nucleotides[fields[F_REF_ALLELE]])
                    prevPos = fields[F_POS]
        # One "name<TAB>position" row per retained marker.
        with open(fPathMarker, 'w') as fMarker:
            for marker in self.markers:
                fMarker.write(self.delim.join(marker))
                fMarker.write('\n')
        # Each row is written twice -- presumably the two haplotype lines a
        # diploid PHASE input expects per individual; TODO confirm format.
        with open(fPathPhased, 'w') as fPhased:
            for accession in self.haplotypes:
                fPhased.write(self.delim.join(map(str, accession)) + '\n')
                fPhased.write(self.delim.join(map(str, accession)) + '\n')
if __name__ == '__main__':
    # Demo driver: parse the bundled variant table with an 85% frequency cutoff.
    input_path = "variant_table.10_30.txt"
    cutoff = 0.85
    PhasedHaplotypeParser().parse(input_path, cutoff)
|
npocmaka/Windows-Server-2003
|
net/config/shell/lanui/eapolui.cpp
|
<filename>net/config/shell/lanui/eapolui.cpp
#include "pch.h"
#pragma hdrstop
#include "connutil.h"
#include "ncnetcon.h"
#include "ncperms.h"
#include "ncui.h"
#include "lanui.h"
#include "eapolui.h"
#include "util.h"
#include "lanhelp.h"
#include "wzcprops.h"
#include "wzcpage.h"
#include "wzcui.h"
#include "wzcsapi.h"
////////////////////////////////////////////////////////////////////////
// CEapolConfig related stuff
//
//+---------------------------------------------------------------------------
// constructor
CEapolConfig::CEapolConfig()
{
    // Start from a fully cleared configuration: no control flags, zeroed
    // interface parameters, and no EAP-package list loaded yet.
    m_pListEapcfgs = NULL;
    m_dwCtlFlags = 0;
    ZeroMemory(&m_EapolIntfParams, sizeof(m_EapolIntfParams));
}
//+---------------------------------------------------------------------------
// destructor
CEapolConfig::~CEapolConfig()
{
    // Scrub the interface parameters, then release the EAP-package list
    // (if one was ever loaded) and drop the now-dangling pointer.
    ZeroMemory(&m_EapolIntfParams, sizeof(m_EapolIntfParams));
    if (m_pListEapcfgs != NULL)
    {
        DtlDestroyList(m_pListEapcfgs, DestroyEapcfgNode);
        m_pListEapcfgs = NULL;
    }
}
//+---------------------------------------------------------------------------
// Deep-copies another CEapolConfig into this one: a fresh EAPCFG list is
// read from the registry, each package's opaque auth blob is duplicated
// from the source config (matched by dwKey), and the interface parameters
// are copied wholesale. On any failure the partially built list is
// destroyed and an error code is returned; this object is only modified
// on success.
DWORD CEapolConfig::CopyEapolConfig(CEapolConfig *pEapolConfig)
{
    DTLLIST *pListEapcfgs = NULL;
    DTLNODE *pCopyNode = NULL, *pInNode = NULL;
    DWORD dwRetCode = ERROR_SUCCESS;
    if (pEapolConfig)
    {
        pListEapcfgs = ::ReadEapcfgList (EAPOL_MUTUAL_AUTH_EAP_ONLY);
        if (pListEapcfgs)
        {
            for (pCopyNode = DtlGetFirstNode(pListEapcfgs);
                pCopyNode;
                pCopyNode = DtlGetNextNode(pCopyNode))
            {
                EAPCFG* pCopyEapcfg = (EAPCFG* )DtlGetData(pCopyNode);
                // Find the matching package in the source list and clone
                // its custom-auth blob.
                for (pInNode = DtlGetFirstNode(pEapolConfig->m_pListEapcfgs);
                        pInNode;
                        pInNode = DtlGetNextNode(pInNode))
                {
                    EAPCFG* pInEapcfg = (EAPCFG* )DtlGetData(pInNode);
                    if (pCopyEapcfg->dwKey == pInEapcfg->dwKey)
                    {
                        if ((pCopyEapcfg->pData = (PBYTE) MALLOC (pInEapcfg->cbData)) == NULL)
                        {
                            dwRetCode = ERROR_NOT_ENOUGH_MEMORY;
                            break;
                        }
                        memcpy (pCopyEapcfg->pData, pInEapcfg->pData, pInEapcfg->cbData);
                        pCopyEapcfg->cbData = pInEapcfg->cbData;
                        break;
                    }
                }
                // NO_ERROR == ERROR_SUCCESS, so this catches the
                // allocation failure flagged above.
                if (dwRetCode != NO_ERROR)
                {
                    goto LExit;
                }
            }
        }
        m_pListEapcfgs = pListEapcfgs;
        memcpy (&m_EapolIntfParams, &pEapolConfig->m_EapolIntfParams, sizeof(EAPOL_INTF_PARAMS));
    }
    else
    {
        dwRetCode = ERROR_INVALID_DATA;
    }
LExit:
    // On failure, tear down the partially populated list so nothing leaks.
    if (dwRetCode != ERROR_SUCCESS)
    {
        if (pListEapcfgs)
        {
            DtlDestroyList (pListEapcfgs, DestroyEapcfgNode);
        }
    }
    return dwRetCode;
}
//+---------------------------------------------------------------------------
// Loads this object's EAPOL state for the given interface (and optional
// SSID): interface-level parameters go into m_EapolIntfParams, and each
// registered EAP package's custom-auth blob is fetched into the EAPCFG
// list stored in m_pListEapcfgs. Per-package failures are logged and
// skipped so one bad package does not abort the whole load.
LRESULT CEapolConfig::LoadEapolConfig(LPWSTR wszIntfGuid, PNDIS_802_11_SSID pndSsid)
{
    BYTE *pbData = NULL;
    DWORD cbData = 0;
    EAPOL_INTF_PARAMS EapolIntfParams;
    DTLLIST *pListEapcfgs = NULL;
    HRESULT hr = S_OK;
    // Initialize EAP package list
    // Read the EAPCFG information from the registry and find the node
    // selected in the entry, or the default, if none.
    do
    {
        DTLNODE* pNode = NULL;
        // Read the EAPCFG information from the registry and find the node
        // selected in the entry, or the default, if none.
        pListEapcfgs = ::ReadEapcfgList (EAPOL_MUTUAL_AUTH_EAP_ONLY);
        if (pListEapcfgs)
        {
            DTLNODE* pNodeEap;
            DWORD dwkey = 0;
            // Read the EAP params for this interface
            ZeroMemory ((BYTE *)&EapolIntfParams, sizeof(EAPOL_INTF_PARAMS));
            EapolIntfParams.dwEapFlags = DEFAULT_EAP_STATE;
            EapolIntfParams.dwEapType = DEFAULT_EAP_TYPE;
            if (pndSsid)
            {
                EapolIntfParams.dwSizeOfSSID = pndSsid->SsidLength;
                memcpy (EapolIntfParams.bSSID, pndSsid->Ssid, pndSsid->SsidLength);
            }
            else
            {
                // If NULL SSID, this will get default EAPOL values
                EapolIntfParams.dwSizeOfSSID = 1;
            }
            hr = HrElGetInterfaceParams (
                    wszIntfGuid,
                    &EapolIntfParams
                    );
            if (FAILED (hr))
            {
                TraceTag (ttidLanUi, "HrElGetInterfaceParams failed with error %ld",
                        LresFromHr(hr));
                break;
            }
            TraceTag (ttidLanUi, "HrElGetInterfaceParams: Got EAPtype=(%ld), EAPState =(%ld)", EapolIntfParams.dwEapType, EapolIntfParams.dwEapFlags);
            // Cache the interface parameters on the object.
            memcpy (&m_EapolIntfParams, &EapolIntfParams, sizeof(EAPOL_INTF_PARAMS));
            // Read the EAP configuration info for all EAP packages
            for (pNodeEap = DtlGetFirstNode(pListEapcfgs);
                pNodeEap;
                pNodeEap = DtlGetNextNode(pNodeEap))
            {
                EAPCFG* pEapcfg = (EAPCFG* )DtlGetData(pNodeEap);
                ASSERT( pEapcfg );
                hr = S_OK;
                pbData = NULL;
                TraceTag (ttidLanUi, "Calling HrElGetCustomAuthData for EAP %ld",
                        pEapcfg->dwKey);
                cbData = 0;
                // Get the size of the EAP blob
                hr = HrElGetCustomAuthData (
                        wszIntfGuid,
                        pEapcfg->dwKey,
                        EapolIntfParams.dwSizeOfSSID,
                        EapolIntfParams.bSSID,
                        NULL,
                        &cbData
                        );
                if (!SUCCEEDED(hr))
                {
                    if ((EapolIntfParams.dwSizeOfSSID != 0) &&
                            (hr == HRESULT_FROM_WIN32(ERROR_FILE_NOT_FOUND)))
                    {
                        TraceTag (ttidLanUi, "HrElGetCustomAuthData: SSID!= NULL, not found blob for SSID");
                        // The Last Used SSID did not have a connection
                        // blob created. Call again for size of blob with
                        // NULL SSID
                        EapolIntfParams.dwSizeOfSSID = 0;
                        // Get the size of the EAP blob
                        hr = HrElGetCustomAuthData (
                                wszIntfGuid,
                                pEapcfg->dwKey,
                                0,
                                NULL,
                                NULL,
                                &cbData
                                );
                    }
                    // E_OUTOFMEMORY here is the expected "buffer too
                    // small" answer from the sizing call; cbData now
                    // holds the required length.
                    if (hr == E_OUTOFMEMORY)
                    {
                        if (cbData <= 0)
                        {
                            // No EAP blob stored in the registry
                            TraceTag (ttidLanUi, "HrElGetCustomAuthData: No blob stored in reg at all");
                            pbData = NULL;
                            // Will continue processing for errors
                            // Not exit
                            hr = S_OK;
                        }
                        else
                        {
                            TraceTag (ttidLanUi, "HrElGetCustomAuthData: Found auth blob in registry");
                            // Allocate memory to hold the blob
                            pbData = (PBYTE) MALLOC (cbData);
                            if (pbData == NULL)
                            {
                                hr = S_OK;
                                TraceTag (ttidLanUi, "HrElGetCustomAuthData: Error in memory allocation for EAP blob");
                                continue;
                            }
                            ZeroMemory (pbData, cbData);
                            // Second call actually fetches the blob.
                            hr = HrElGetCustomAuthData (
                                    wszIntfGuid,
                                    pEapcfg->dwKey,
                                    EapolIntfParams.dwSizeOfSSID,
                                    EapolIntfParams.bSSID,
                                    pbData,
                                    &cbData
                                    );
                            if (!SUCCEEDED(hr))
                            {
                                TraceTag (ttidLanUi, "HrElGetCustomAuthData: HrElGetCustomAuthData failed with %ld",
                                        LresFromHr(hr));
                                FREE ( pbData );
                                hr = S_OK;
                                continue;
                            }
                            TraceTag (ttidLanUi, "HrElGetCustomAuthData: HrElGetCustomAuthData successfully got blob of length %ld"
                                    , cbData);
                        }
                    }
                    else
                    {
                        // Any other failure: skip this package.
                        TraceTag (ttidLanUi, "HrElGetCustomAuthData: Not got ERROR_NOT_ENOUGH_MEMORY error; Unknown error !!!");
                        hr = S_OK;
                        continue;
                    }
                }
                else
                {
                    // HrElGetCustomAuthData will always return
                    // error with cbData = 0
                    hr = S_OK;
                }
                // Hand the (possibly NULL) blob to the EAPCFG node,
                // freeing any blob it previously owned.
                if (pEapcfg->pData != NULL)
                {
                    FREE ( pEapcfg->pData );
                }
                pEapcfg->pData = (UCHAR *)pbData;
                pEapcfg->cbData = cbData;
            }
            m_pListEapcfgs = pListEapcfgs;
        }
        else
        {
            hr = E_FAIL;
        }
    } while (FALSE);
    return LresFromHr(hr);
}
//+---------------------------------------------------------------------------
// Persists this object's EAPOL state for the given interface (and
// optional SSID): every EAP package's custom-auth blob is written back,
// then the interface-level parameters are saved. Per-package failures
// are remembered but do not stop the remaining saves; the first such
// failure (if any) is the returned result.
// Cleanup: removed the unused locals pwszLastUsedSSID / dwEapFlags and a
// redundant duplicate "hr = S_OK;" assignment.
LRESULT CEapolConfig::SaveEapolConfig(LPWSTR wszIntfGuid, PNDIS_802_11_SSID pndSsid)
{
    HRESULT hrOverall = S_OK;
    HRESULT hr = S_OK;
    DTLNODE* pNodeEap = NULL;
    // Nothing to persist if no EAP package list was ever loaded.
    if (m_pListEapcfgs == NULL)
    {
        return LresFromHr(S_OK);
    }
    // Target the caller-supplied SSID if given; otherwise keep whatever
    // SSID the parameters were loaded with.
    if (pndSsid)
    {
        m_EapolIntfParams.dwSizeOfSSID = pndSsid->SsidLength;
        memcpy (m_EapolIntfParams.bSSID, pndSsid->Ssid, pndSsid->SsidLength);
    }
    for (pNodeEap = DtlGetFirstNode(m_pListEapcfgs);
        pNodeEap;
        pNodeEap = DtlGetNextNode(pNodeEap))
    {
        EAPCFG* pcfg = (EAPCFG* )DtlGetData(pNodeEap);
        if (pcfg == NULL)
        {
            continue;
        }
        // ignore error and continue with next
        hr = HrElSetCustomAuthData (
                wszIntfGuid,
                pcfg->dwKey,
                m_EapolIntfParams.dwSizeOfSSID,
                m_EapolIntfParams.bSSID,
                pcfg->pData,
                pcfg->cbData);
        if (FAILED (hr))
        {
            TraceTag (ttidLanUi, "HrElSetCustomAuthData failed");
            hrOverall = hr;
            hr = S_OK;
        }
    }
    // A locked configuration may not leave 802.1X enabled.
    if (m_dwCtlFlags & EAPOL_CTL_LOCKED)
        m_EapolIntfParams.dwEapFlags &= ~EAPOL_ENABLED;
    hr = HrElSetInterfaceParams (
            wszIntfGuid,
            &m_EapolIntfParams
            );
    if (FAILED(hr))
    {
        TraceTag (ttidLanUi, "HrElSetInterfaceParams enabled failed with error %ld",
                LresFromHr(hr));
    }
    // Report the first blob-save failure in preference to later success.
    if (hrOverall != S_OK)
    {
        hr = hrOverall;
    }
    return LresFromHr(hr);
}
//+---------------------------------------------------------------------------
BOOL CEapolConfig::Is8021XEnabled()
{
    // Report whether the cached interface flags have the enabled bit set.
    return IS_EAPOL_ENABLED(m_EapolIntfParams.dwEapFlags);
}
//+---------------------------------------------------------------------------
VOID CEapolConfig::Set8021XState(BOOLEAN fSet)
{
    // Toggle only the EAPOL_ENABLED bit, leaving all other flags intact.
    DWORD dwFlags = m_EapolIntfParams.dwEapFlags;
    m_EapolIntfParams.dwEapFlags = fSet ? (dwFlags | EAPOL_ENABLED)
                                        : (dwFlags & ~EAPOL_ENABLED);
}
|
ScottWinkler/github-api-app
|
react-ui/src/components/compare/comparestats/commitchart/commitchart.js
|
import React, {Component} from 'react';
import './commitchart.css';
import {BarChart,XAxis,YAxis,CartesianGrid,Tooltip,Legend,Bar} from 'recharts';
export default class CommitChart extends Component {
render() {
return (
<div >
<h2 >Commits in Last Year</h2>
<BarChart data={this.props.commitData} width={600} height={400}>
<XAxis dataKey="name" />
<YAxis />
<CartesianGrid strokeDasharray="3 3" />
<Tooltip />
<Legend />
<Bar dataKey="commits" fill="#8884d8" />
</BarChart>
</div>
)}}
|
moutainhigh/ses-server
|
ses-job/ses-job-executor/ses-instance-one/src/main/java/com/redescooter/ses/instance/one/run/foundation/SendMailTasks.java
|
<reponame>moutainhigh/ses-server
package com.redescooter.ses.instance.one.run.foundation;
import com.redescooter.ses.api.common.enums.job.JobDefaultError;
import com.redescooter.ses.api.common.vo.base.GeneralEnter;
import com.redescooter.ses.api.common.vo.jiguang.JobResult;
import com.redescooter.ses.api.foundation.job.RunSendMailTaskExecutorServiceJob;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.handler.IJobHandler;
import com.xxl.job.core.handler.annotation.XxlJob;
import com.xxl.job.core.log.XxlJobLogger;
import lombok.extern.slf4j.Slf4j;
import org.apache.dubbo.config.annotation.DubboReference;
import org.springframework.stereotype.Component;
import java.util.Calendar;
/**
* @author Mr.lijiating
* @version V1.0
* @Date: 26/12/2019 11:03 上午
* @ClassName: SendMailTasks
* @Function: TODO
*/
@Slf4j
@Component
public class SendMailTasks {
    @DubboReference
    private RunSendMailTaskExecutorServiceJob runSendMailTaskExecutorServiceJob;
    /**
     * XXL-Job entry point: triggers the mail-sending task via the Dubbo
     * service and reports success/failure plus elapsed time to the job log.
     *
     * @param param unused job parameter supplied by the scheduler
     * @return SUCCESS with a timing message, or FAIL with the task's error
     *         message (falling back to the generic internal-error message)
     */
    @XxlJob("SendMailTaskExecutor")
    public ReturnT<String> sendMails(String param) throws Exception {
        // Record the start time so elapsed milliseconds can be reported.
        Calendar start = Calendar.getInstance();
        JobResult jobResult = runSendMailTaskExecutorServiceJob.sendMailTask(new GeneralEnter());
        if (jobResult.getStatus().name().equals("SUCCEED")) {
            XxlJobLogger.log("本次[SendMailTaskExecutor]任务执行完毕,等待下次执行!");
            XxlJobLogger.log("本次任务执行完毕,耗时{}毫秒。", +(Calendar.getInstance().getTimeInMillis() - start.getTimeInMillis()));
            // Simple return value; adjust to your own conventions if needed.
            return new ReturnT(ReturnT.SUCCESS_CODE, "【SendMailTaskExecutor】执行成功,耗时" + (Calendar.getInstance().getTimeInMillis() - start.getTimeInMillis()) + "毫秒.");
        }
        return new ReturnT<String>(IJobHandler.FAIL.getCode(), jobResult.getErrorMessage() == null ? JobDefaultError.RJOB_SYSTEM_INTERNAL_ERROR.getErrorMessage() : jobResult.getErrorMessage());
    }
}
|
541660139/qjtv
|
app/src/main/java/com/lwd/qjtv/app/utils/GlideConfigGlobal.java
|
<gh_stars>0
package com.lwd.qjtv.app.utils;
import android.widget.ImageView;
import com.jess.arms.widget.imageloader.glide.GlideImageConfig;
import com.lwd.qjtv.R;
/**
* Email:<EMAIL>
* Created by ZhengQian on 2017/6/14.
*/
public class GlideConfigGlobal {
    /**
     * Builds the project-wide Glide image request configuration: the given
     * URL is loaded into the given view, with the default video placeholder
     * shown both while loading and on error.
     */
    public static GlideImageConfig loadImageView(String url, ImageView imageView){
        return GlideImageConfig.builder()
                .placeholder(R.mipmap.video_place_holder)
                .errorPic(R.mipmap.video_place_holder)
                .imageView(imageView)
                .url(url)
                .build();
    }
}
|
ronaldahmed/SLAM-for-ugv
|
neural-navigation-with-lstm/MARCO/nltk/tokenreader/__init__.py
|
# Natural Language Toolkit: Token Readers
#
# Copyright (C) 2001 University of Pennsylvania
# Author: <NAME> <<EMAIL>>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
#
# $Id: __init__.py,v 1.1.1.1 2004/09/29 21:58:19 adastra Exp $
"""
Classes for reading string representations of tokens.
"""
######################################################################
## Token Reader Interface
######################################################################
# Note that this is not a subclass of TaskI: It takes strings as
# inputs, not tokens; and it's just doing deterministic parsing, not a
# real natural language processing task.
class TokenReaderI:
    """
    Interface for objects that convert string encodings of tokens back
    into L{Token} objects.

    Each concrete reader understands one particular string
    representation, and the tokens it produces carry whatever
    information that representation encodes -- so the set of properties
    defined on the returned tokens varies from reader to reader.

    Individual readers may accept extra arguments to C{read_token}
    and C{read_tokens} (such as C{add_locs}, C{add_context},
    C{add_subtoks}, and C{add_text}) that control exactly which
    properties are recorded; see each reader's own C{read_token}
    documentation for details.
    """

    def read_token(s):
        """
        Parse a single token from the string C{s}.

        @return: The token encoded by the string C{s}.
        @rtype: L{Token}
        """
        raise NotImplementedError

    def read_tokens(s):
        """
        Parse a sequence of tokens from the string C{s}.

        @return: A list of the tokens encoded by the string C{s}.
        @rtype: C{list} of L{Token}
        """
        raise NotImplementedError
######################################################################
## Import token reader implementations.
######################################################################
from nltk.tokenreader.tokenizerbased import *
from nltk.tokenreader.treebank import *
from nltk.tokenreader.tagged import *
from nltk.tokenreader.conll import *
from nltk.tokenreader.ieer import *
######################################################################
## Demo
######################################################################
def demo():
    # Exercise each bundled token reader on a tiny example.
    # NOTE: Python 2 print statements -- this module predates Python 3.
    print 'Whitespace separated token reader:'
    reader = WhitespaceSeparatedTokenReader(SUBTOKENS='WORDS')
    print reader.read_token('tokens separated by spaces', add_locs=True)
    print 'Newline separated token reader:'
    reader = NewlineSeparatedTokenReader(SUBTOKENS='WORDS')
    print reader.read_token('tokens\nseparated\nby\n\nnewlines')
    print 'Treebank token reader:'
    reader = TreebankTokenReader(SUBTOKENS='WORDS', add_subtoks=False)
    print reader.read_token('(DP (DET a) (NP (NN Treebank) (NN Tree)))')
if __name__ == '__main__':
    demo()
|
sugin223pl/ta-cdn-fts4sdytg
|
ta/assets/js/custom.js
|
<filename>ta/assets/js/custom.js
// Show the given message in the page's error banner, then halt the
// current handler by throwing.
function abort(message)
{
    $('.errorText').css('display', 'block').html(message);
    throw new Error(message);
}
// Toggle the collapsed/expanded quote sections and swap this trigger's
// expand/collapse class accordingly.
$('.expandQuote').click(function() {
    $('.expand').toggle();
    $('.hiddenQuote').toggle();
    var classname = $(this).attr('class');
    if(classname == 'expand') {
        $(this).removeClass('expand').addClass('collapse');
    } else if(classname == 'collapse') {
        $(this).removeClass('collapse').addClass('expand');
    }
});
// Reveal the voucher box when the currency explanation link is clicked.
$('.whyCurrency').click(function() {
    $('#voucher').toggle('slow');
});
// Swap the apply button for a spinner and lock the voucher input while
// the voucher is being applied.
$('#applyVoucher').click(function() {
    $(this).html('').html('<i class="fa fa-circle-o-notch fa-spin"></i>');
    $('#code').attr('disabled', true)
});
// Mirror the selected country's dialling code into the visible selector.
$('#countryCodeSelect').change(function() {
    var option = $('option:selected', this);
    var ccode = option.attr('data-country-code');
    $('.selectContent').html(ccode);
});
// Shared flag set by the phone-verification AJAX callback below;
// true once the API reports the number as verified.
var lStorage = false;
// Validates the booking form field by field (each validator calls abort(),
// which throws, on failure), then verifies the phone number via a remote
// API. The actual form submission happens inside the AJAX `complete`
// callback -- NOT in this handler's final statements, which run before the
// response arrives (lStorage is still false at that point).
$(".validateButton").click(function(e) {
    // Full name: letters, hyphens, apostrophes and spaces only.
    function validateName(name) {
        var regName = /^[a-zA-Z-' ]+$/;
        if(!regName.test(name)){
            abort('Please enter your full name (first & last name).')
        }
    }
    function validateAddress(address) {
        if (address.length < 10) {
            abort('Please enter your address')
        }
    }
    function validateMessage(message) {
        if (message.length < 10) {
            abort('Please enter a message to your host')
        }
    }
    // Returns the number with leading zeros stripped (only when long enough).
    function validateNumber(number) {
        if (number.length < 9) {
            abort('Please enter a phone number');
        } else {
            return number = number.replace(/^0+/,'');
        }
    }
    function validateEmail(email) {
        if (email.length < 6) {
            abort('Please enter your email');
        } else {
            var emailReg = /^([\w-\.]+@([\w-]+\.)+[\w-]{2,4})?$/;
            var validEmail = emailReg.test(email);
            if (!validEmail) {
                abort('Please enter a valid email');
            }
        }
    }
    // Asks the NamSor API whether the phone number is valid; on success it
    // disables the inputs, fills the hidden fields and submits the form.
    // NOTE(review): the API key is hardcoded and shipped to every client --
    // it should be rotated and moved behind a server-side proxy.
    function ajaxGo(phone) {
        $.ajax({
            url: "https://v2.namsor.com/NamSorAPIv2/api2/json/phoneCode/James/Eddison/" + phone,
            data: '',
            type: "GET",
            beforeSend: function(xhr) {
                xhr.setRequestHeader('X-API-KEY', '9f74e628964d4e31167f03d319faebef');
            }, success: function(response, data) {
                if(response.verified === true) {
                    lStorage = true
                    console.log('Getting response after success done!');
                    console.log(lStorage);
                }
            }, complete: function() {
                console.log('Getting response after complete');
                if(lStorage === true) {
                    // Verified: clear the banner, copy prefix/number into the
                    // hidden fields, lock the inputs and submit the form.
                    $('.errorText').html('').css('display','none');
                    var country_code = $('#countryCodeSelect').val();
                    var prefix_sg = $('#countryCodeSelect option:selected').attr('data-country-code');
                    $('#prefix_sg').val(prefix_sg);
                    var number = $('#renterNumber').val();
                    var val_number = validateNumber(number);
                    var phone_sg = val_number;
                    $('#phone_sg').val(phone_sg);
                    var real_phone = country_code + val_number;
                    $('#renterNumber').attr('disabled',true);
                    $('#countryCodeSelect').attr('disabled',true);
                    $('#phone').val(real_phone);
                    $(".validateButton").html('').html('<i class="fa fa-circle-o-notch fa-spin"></i>').attr('disabled', true);
                    $("#bookingRequestForm").submit();
                } else {
                    abort('Please enter a valid phone number');
                }
            },
        });
    }
    // Kicks off the async check; the returned lStorage value is whatever it
    // was BEFORE the request completed (the request is asynchronous).
    function validatePhone(phone) {
        ajaxGo(phone);
        console.log('Getting response after calling the function')
        console.log(lStorage);
        return lStorage;
    }
    // Validate each field in order, locking it once it passes.
    var name = $('#renterFullName').val();
    validateName(name);
    $('#renterFullName').attr('readonly',true);
    var ccode = $('#countryCodeSelect option:selected').attr('data-country-code');
    var country_code = $('#countryCodeSelect').val();
    var number = $('#renterNumber').val();
    validateNumber(number);
    var phone = ccode + '' + validateNumber(number);
    var email = $('#renterEmail').val();
    validateEmail(email);
    $('#renterEmail').attr('readonly',true);
    var address = $('#renterAddress').val();
    validateAddress(address);
    $('#renterAddress').attr('readonly',true);
    var message = $('#renterMessage').val();
    validateMessage(message);
    $('#renterMessage').attr('readonly',true);
    validatePhone(phone);
    // Always prevent the default submit; the AJAX callback submits instead.
    return false;
});
// $(".submitButton").click(function(e) {
// var data = JSON.stringify( $(form).serializeArray() );
// console.log( data );
// $.post("demo_test_post.asp", {
// name: "<NAME>",
// city: "Duckburg"
// },
// function(data, status){
// alert("Data: " + data + "\nStatus: " + status);
// });
// return false;
// });
|
brentjm/mettler-toledo-spectra-trender
|
node_modules/node-opcua-factory/dist/factories_builtin_types.js
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* @module node-opcua-factory
*/
const node_opcua_assert_1 = require("node-opcua-assert");
const node_opcua_basic_types_1 = require("node-opcua-basic-types");
const node_opcua_guid_1 = require("node-opcua-guid");
const node_opcua_nodeid_1 = require("node-opcua-nodeid");
const node_opcua_status_code_1 = require("node-opcua-status-code");
const _ = require("underscore");
const types_1 = require("./types");
// tslint:disable:no-empty
// tslint:enable:no-unused-variable
// Encoder used when a type schema supplies none: writes nothing.
function defaultEncode(value, stream) {
}
// Decoder used when a type schema supplies none: reads nothing, yields null.
function defaultDecode(stream) {
    return null;
}
// Schema describing one basic data type: carries an optional subtype name
// plus encode/decode callbacks, falling back to the no-op defaults above
// when the options omit them.
class BasicTypeSchema extends types_1.TypeSchemaBase {
    constructor(options) {
        super(options);
        this.subType = options.subType;
        this.encode = options.encode || defaultEncode;
        this.decode = options.decode || defaultDecode;
    }
}
exports.BasicTypeSchema = BasicTypeSchema;
// 1601-01-01T00:00:00Z -- presumably the Windows FILETIME epoch that OPC UA
// DateTime encoding is based on; confirm against OPC UA Part 6.
exports.minDate = new Date(Date.UTC(1601, 0, 1, 0, 0, 0));
// Default GUID value: an empty (zero-length) buffer.
function defaultGuidValue() {
    return Buffer.from([]);
}
// JSON form of a GUID: string GUIDs pass through unchanged, binary GUIDs
// (Buffers) are rendered as base64 text.
function toJSONGuid(value) {
    if (typeof value !== "string") {
        node_opcua_assert_1.assert(value instanceof Buffer);
        return value.toString("base64");
    }
    return value;
}
// "Any" is an abstract marker type that never appears on the wire, so
// both of its codec callbacks fail loudly if ever invoked.
function encodeAny(value, stream) {
    node_opcua_assert_1.assert(false, "type 'Any' cannot be encoded");
}
function decodeAny(stream) {
    node_opcua_assert_1.assert(false, "type 'Any' cannot be decoded");
}
// "Null" encodes to nothing and decodes to the null value.
function encodeNull(value, stream) {
}
function decodeNull(stream) {
    return null;
}
// There are 4 kinds of DataTypes in OPC UA:
//   - Built-In DataType
//   - Simple DataType
//   - Complex DataType
//   - Enumeration
// Default value used for the XmlElement built-in type: an empty string.
const defaultXmlElement = "";
// Built-In Type
const _defaultType = [
// Built-in DataTypes ( see OPCUA Part III v1.02 - $5.8.2 )
{
name: "Null",
decode: decodeNull,
encode: encodeNull,
defaultValue: null
},
{
name: "Any",
decode: decodeAny,
encode: encodeAny
},
{
name: "Boolean",
decode: node_opcua_basic_types_1.decodeBoolean,
encode: node_opcua_basic_types_1.encodeBoolean,
coerce: node_opcua_basic_types_1.coerceBoolean,
defaultValue: false
},
{ name: "Int8", encode: node_opcua_basic_types_1.encodeInt8, decode: node_opcua_basic_types_1.decodeInt8, defaultValue: 0, coerce: node_opcua_basic_types_1.coerceInt8 },
{ name: "UInt8", encode: node_opcua_basic_types_1.encodeUInt8, decode: node_opcua_basic_types_1.decodeUInt8, defaultValue: 0, coerce: node_opcua_basic_types_1.coerceUInt8 },
{ name: "SByte", encode: node_opcua_basic_types_1.encodeSByte, decode: node_opcua_basic_types_1.decodeSByte, defaultValue: 0, coerce: node_opcua_basic_types_1.coerceSByte },
{ name: "Byte", encode: node_opcua_basic_types_1.encodeByte, decode: node_opcua_basic_types_1.decodeByte, defaultValue: 0, coerce: node_opcua_basic_types_1.coerceByte },
{ name: "Int16", encode: node_opcua_basic_types_1.encodeInt16, decode: node_opcua_basic_types_1.decodeInt16, defaultValue: 0, coerce: node_opcua_basic_types_1.coerceInt16 },
{ name: "UInt16", encode: node_opcua_basic_types_1.encodeUInt16, decode: node_opcua_basic_types_1.decodeUInt16, defaultValue: 0, coerce: node_opcua_basic_types_1.coerceUInt16 },
{ name: "Int32", encode: node_opcua_basic_types_1.encodeInt32, decode: node_opcua_basic_types_1.decodeInt32, defaultValue: 0, coerce: node_opcua_basic_types_1.coerceInt32 },
{ name: "UInt32", encode: node_opcua_basic_types_1.encodeUInt32, decode: node_opcua_basic_types_1.decodeUInt32, defaultValue: 0, coerce: node_opcua_basic_types_1.coerceUInt32 },
{
name: "Int64",
decode: node_opcua_basic_types_1.decodeInt64,
encode: node_opcua_basic_types_1.encodeInt64,
coerce: node_opcua_basic_types_1.coerceInt64,
defaultValue: node_opcua_basic_types_1.coerceInt64(0)
},
{
name: "UInt64",
decode: node_opcua_basic_types_1.decodeUInt64,
encode: node_opcua_basic_types_1.encodeUInt64,
coerce: node_opcua_basic_types_1.coerceUInt64,
defaultValue: node_opcua_basic_types_1.coerceUInt64(0)
},
{
name: "Float",
decode: node_opcua_basic_types_1.decodeFloat,
encode: node_opcua_basic_types_1.encodeFloat,
coerce: node_opcua_basic_types_1.coerceFloat,
defaultValue: 0.0
},
{
name: "Double",
decode: node_opcua_basic_types_1.decodeDouble,
encode: node_opcua_basic_types_1.encodeDouble,
coerce: node_opcua_basic_types_1.coerceDouble,
defaultValue: 0.0
},
{
name: "String",
decode: node_opcua_basic_types_1.decodeString,
encode: node_opcua_basic_types_1.encodeString,
defaultValue: ""
},
// OPC Unified Architecture, part 3.0 $8.26 page 67
{
name: "DateTime",
decode: node_opcua_basic_types_1.decodeDateTime,
encode: node_opcua_basic_types_1.encodeDateTime,
coerce: node_opcua_basic_types_1.coerceDateTime,
defaultValue: exports.minDate
},
{
name: "Guid",
decode: node_opcua_basic_types_1.decodeGuid,
encode: node_opcua_basic_types_1.encodeGuid,
defaultValue: node_opcua_guid_1.emptyGuid
},
{
name: "ByteString",
decode: node_opcua_basic_types_1.decodeByteString,
encode: node_opcua_basic_types_1.encodeByteString,
coerce: node_opcua_basic_types_1.coerceByteString,
defaultValue: null,
toJSON: toJSONGuid
},
{
name: "XmlElement",
decode: node_opcua_basic_types_1.decodeString,
encode: node_opcua_basic_types_1.encodeString,
defaultValue: defaultXmlElement
},
// see OPCUA Part 3 - V1.02 $8.2.1
{
name: "NodeId",
decode: node_opcua_basic_types_1.decodeNodeId,
encode: node_opcua_basic_types_1.encodeNodeId,
coerce: node_opcua_basic_types_1.coerceNodeId,
defaultValue: node_opcua_nodeid_1.makeNodeId
},
{
name: "ExpandedNodeId",
decode: node_opcua_basic_types_1.decodeExpandedNodeId,
encode: node_opcua_basic_types_1.encodeExpandedNodeId,
coerce: node_opcua_basic_types_1.coerceExpandedNodeId,
defaultValue: node_opcua_nodeid_1.makeExpandedNodeId
},
// ----------------------------------------------------------------------------------------
// Simple DataTypes
// ( see OPCUA Part III v1.02 - $5.8.2 )
// Simple DataTypes are subtypes of the Built-in DataTypes. They are handled on the wire like the
// Built-in DataType, i.e. they cannot be distinguished on the wire from their Built-in supertypes.
// Since they are handled like Built-in DataTypes regarding the encoding they cannot have encodings
// defined in the AddressSpace. Clients can read the DataType Attribute of a Variable or VariableType to
// identify the Simple DataType of the Value Attribute. An example of a Simple DataType is Duration. It
// is handled on the wire as a Double but the Client can read the DataType Attribute and thus interpret
// the value as defined by Duration
//
// OPC Unified Architecture, part 4.0 $7.13
// IntegerID: This primitive data type is an UInt32 that is used as an identifier, such as a handle. All values,
// except for 0, are valid.
{
name: "IntegerId",
decode: node_opcua_basic_types_1.decodeUInt32,
encode: node_opcua_basic_types_1.encodeUInt32,
defaultValue: 0xFFFFFFFF
},
// The StatusCode is a 32-bit unsigned integer. The top 16 bits represent the numeric value of the
// code that shall be used for detecting specific errors or conditions. The bottom 16 bits are bit flags
// that contain additional information but do not affect the meaning of the StatusCode.
// 7.33 Part 4 - P 143
{
name: "StatusCode",
decode: node_opcua_status_code_1.decodeStatusCode,
encode: node_opcua_status_code_1.encodeStatusCode,
coerce: node_opcua_status_code_1.coerceStatusCode,
defaultValue: node_opcua_status_code_1.StatusCodes.Good
}
];
/**
 * Register a basic type schema in the global type map.
 *
 * The schema must carry a string name plus encode and decode functions; it
 * is tagged as FieldCategory.basic, wrapped in a BasicTypeSchema and stored
 * under its name.
 *
 * @method registerType
 * @param schema {TypeSchemaBase}
 */
function registerType(schema) {
    node_opcua_assert_1.assert(typeof schema.name === "string");
    // Both wire functions are mandatory for a basic type.
    for (const required of ["encode", "decode"]) {
        if (!_.isFunction(schema[required])) {
            throw new Error("schema " + schema.name + " has no " + required + " function");
        }
    }
    schema.category = types_1.FieldCategory.basic;
    _defaultTypeMap.set(schema.name, new BasicTypeSchema(schema));
}
exports.registerType = registerType;
exports.registerBuiltInType = registerType;
// Remove a previously registered basic type from the global type map.
// @param typeName  the schema name used at registration time
function unregisterType(typeName) {
    _defaultTypeMap.delete(typeName);
}
exports.unregisterType = unregisterType;
/**
 * @method findSimpleType
 * @param name
 * @return {TypeSchemaBase} the registered schema for the given name
 *
 * NOTE(review): despite the original "|null" annotation, this function never
 * returns null — the asserts fire when the name is unknown or the entry is
 * not a TypeSchemaBase.
 */
function findSimpleType(name) {
    const typeSchema = _defaultTypeMap.get(name);
    node_opcua_assert_1.assert(typeSchema);
    node_opcua_assert_1.assert(typeSchema instanceof types_1.TypeSchemaBase);
    return typeSchema;
}
exports.findSimpleType = findSimpleType;
// populate the default type map with the table above
const _defaultTypeMap = new Map();
_defaultType.forEach(registerType);
// True if a basic type with the given name has been registered.
function hasBuiltInType(name) {
    return _defaultTypeMap.has(name);
}
exports.hasBuiltInType = hasBuiltInType;
// Look up a registered basic type schema; yields undefined when absent.
// NOTE(review): "BuildInType" is a historical typo kept for API compatibility.
function getBuildInType(name) {
    return _defaultTypeMap.get(name);
}
exports.getBuildInType = getBuildInType;
/**
 * @method findBuiltInType
 * Resolve a data type name to its Built-In Type schema, following the
 * subType chain so that Simple DataTypes resolve to the built-in supertype
 * actually used on the wire.
 * @param dataTypeName  a string, or a QualifiedName-like object (coerced via toString)
 * @return {*} the schema of the underlying built-in type
 */
function findBuiltInType(dataTypeName) {
    // coerce string or Qualified Name to string
    if (dataTypeName.name) {
        dataTypeName = dataTypeName.toString();
    }
    node_opcua_assert_1.assert(typeof dataTypeName === "string", "findBuiltInType : expecting a string " + dataTypeName);
    const schema = _defaultTypeMap.get(dataTypeName);
    if (!schema) {
        throw new Error("datatype " + dataTypeName + " must be registered");
    }
    // A schema with a distinct supertype is a Simple DataType: resolve it
    // recursively (the name comparison guards against self-referential loops).
    const hasDistinctSuperType = !!schema.subType && schema.subType !== schema.name;
    return hasDistinctSuperType ? findBuiltInType(schema.subType) : schema;
}
exports.findBuiltInType = findBuiltInType;
// Expose the internal name -> schema map (mainly for diagnostics and tests).
function getTypeMap() {
    return _defaultTypeMap;
}
exports.getTypeMap = getTypeMap;
//# sourceMappingURL=factories_builtin_types.js.map
|
bswood9321/PHYS-3210
|
Exam2/Exam2_Q3_BSW.py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 3 23:49:01 2019
@author: Brandon

Simple pendulum integrated with semi-implicit Euler for four starting
angles (pi/8, 2*pi/8, 3*pi/8, 4*pi/8); the plots show the last run.

NOTE(review): indentation was lost in this file; the loop structure below is
a reconstruction (per-run print inside the outer loop, plots afterwards).
Fixes applied: removed the unused `findT` list, and X/Y are now sampled from
the *updated* angle so all history lists are aligned at the same time index
(the original appended a stale x/y one step behind THETA/W).
"""
import numpy as np
import matplotlib.pyplot as plt

theta1 = np.pi / 8            # starting angle for the current run (rad)
for _run in range(4):
    theta = theta1            # angular position (rad)
    w = 0                     # angular velocity (rad/s)
    l = 2                     # pendulum length (m)
    g = 9.82                  # gravitational acceleration (m/s^2)
    t = 0
    m = 5                     # mass (kg); cancels out of the dynamics
    dt = .01                  # time step (s)
    # History lists, seeded with the initial state.
    THETA = [theta]
    W = [w]
    T = [t]
    X = [l * np.sin(theta)]
    Y = [l - l * np.cos(theta)]
    while t <= 10:
        # Semi-implicit Euler: update velocity from the tangential restoring
        # force, then position from the new velocity.
        f = -m * g * np.sin(theta)
        w = w + (f / m) * dt
        theta = theta + w * dt
        t = t + dt
        T.append(t)
        X.append(l * np.sin(theta))
        Y.append(l - l * np.cos(theta))
        THETA.append(theta)
        W.append(w)
    print('Starting angle: ', theta1)
    theta1 = theta1 + np.pi / 8

# Plots use the lists from the last (largest-angle) run.
plt.plot(T, X, label='x')
plt.plot(T, W, label='w')
plt.plot(T, THETA, label='angle')
plt.plot(T, Y, label='y')
plt.legend(loc=(1.04, 0))
plt.show()
plt.plot(X, Y)
plt.title('Pendulum position in X,Y plane')
plt.show()
plt.plot(THETA, W)
plt.title('Pendulum angle vs. angular speed')
plt.xlabel('Angle')
plt.ylabel('W')
plt.show()
print('The theoretical Tau is: ', 2 * np.pi * np.sqrt((l / g)))
print('------------------------------------------------')
print('The X,Y plot makes sense because it is basically plotting a visualization of the pendulum swinging '
      'back and forth in front of a viewer.')
print('The angle vs. angular velocity graph also makes sense, as we see the angular velocity is at its max '
      'when the pendulum is at the lowest point (an angle of zero), while moving towards the positive theta '
      'and is at its min when the pendulum is at its lowest, and moving towards the negative theta values.')
|
kagwicharles/Seniorproject-ui
|
node_modules/@iconify/icons-ic/twotone-qr-code.js
|
<filename>node_modules/@iconify/icons-ic/twotone-qr-code.js
// Auto-generated Iconify icon module: Material Design "qr code" (twotone).
// `body` is the inner SVG markup; width/height give the 24x24 viewBox.
var data = {
	"body": "<path opacity=\".3\" d=\"M5 15h4v4H5zM5 5h4v4H5zm10 0h4v4h-4z\" fill=\"currentColor\"/><path d=\"M3 11h8V3H3v8zm2-6h4v4H5V5zM3 21h8v-8H3v8zm2-6h4v4H5v-4zm8-12v8h8V3h-8zm6 6h-4V5h4v4zm0 10h2v2h-2zm-6-6h2v2h-2zm2 2h2v2h-2zm-2 2h2v2h-2zm2 2h2v2h-2zm2-2h2v2h-2zm0-4h2v2h-2zm2 2h2v2h-2z\" fill=\"currentColor\"/>",
	"width": 24,
	"height": 24
};
exports.__esModule = true;
exports.default = data;
|
DoctorMacc/ViaBackwards
|
core/src/main/java/nl/matsv/viabackwards/protocol/protocol1_12_2to1_13/packets/SoundPackets1_13.java
|
package nl.matsv.viabackwards.protocol.protocol1_12_2to1_13.packets;
import nl.matsv.viabackwards.ViaBackwards;
import nl.matsv.viabackwards.api.rewriters.Rewriter;
import nl.matsv.viabackwards.protocol.protocol1_12_2to1_13.Protocol1_12_2To1_13;
import nl.matsv.viabackwards.protocol.protocol1_12_2to1_13.data.BackwardsMappings;
import nl.matsv.viabackwards.protocol.protocol1_12_2to1_13.data.NamedSoundMapping;
import us.myles.ViaVersion.api.Via;
import us.myles.ViaVersion.api.remapper.PacketRemapper;
import us.myles.ViaVersion.api.type.Type;
import us.myles.ViaVersion.protocols.protocol1_12_1to1_12.ClientboundPackets1_12_1;
import us.myles.ViaVersion.protocols.protocol1_13to1_12_2.ClientboundPackets1_13;
/**
 * Rewrites 1.13 clientbound sound packets so that 1.12.2 clients understand them:
 * remaps named and numeric sound ids and converts Stop Sound into the legacy
 * MC|StopSound plugin message.
 */
public class SoundPackets1_13 extends Rewriter<Protocol1_12_2To1_13> {
    // Sound source categories, indexed by the 1.13 sound-category var-int id.
    private static final String[] SOUND_SOURCES = {"master", "music", "record", "weather", "block", "hostile", "neutral", "player", "ambient", "voice"};

    public SoundPackets1_13(Protocol1_12_2To1_13 protocol) {
        super(protocol);
    }

    @Override
    protected void registerPackets() {
        // Named Sound: remap the 1.13 sound identifier string to its 1.12 equivalent.
        protocol.registerOutgoing(ClientboundPackets1_13.NAMED_SOUND, new PacketRemapper() {
            @Override
            public void registerMap() {
                map(Type.STRING); // sound identifier
                handler(wrapper -> {
                    String newSound = wrapper.get(Type.STRING, 0);
                    // Prefer the explicit named-sound table; fall back to the generic sound mapping.
                    String oldSound = NamedSoundMapping.getOldId(newSound);
                    if (oldSound != null || (oldSound = BackwardsMappings.soundMappings.getNewId(newSound)) != null) {
                        wrapper.set(Type.STRING, 0, oldSound);
                    } else if (!Via.getConfig().isSuppressConversionWarnings()) {
                        ViaBackwards.getPlatform().getLogger().warning("Unknown named sound in 1.13->1.12 protocol: " + newSound);
                    }
                });
            }
        });
        // Stop Sound -> Plugin Message
        // 1.12 clients have no dedicated Stop Sound packet; they implement it via
        // the MC|StopSound plugin channel instead.
        protocol.registerOutgoing(ClientboundPackets1_13.STOP_SOUND, ClientboundPackets1_12_1.PLUGIN_MESSAGE, new PacketRemapper() {
            @Override
            public void registerMap() {
                handler(wrapper -> {
                    wrapper.write(Type.STRING, "MC|StopSound");
                    // Bit flags: 0x01 = a source category follows, 0x02 = a sound name follows.
                    byte flags = wrapper.read(Type.BYTE);
                    String source;
                    if ((flags & 0x01) != 0) {
                        source = SOUND_SOURCES[wrapper.read(Type.VAR_INT)];
                    } else {
                        source = ""; // empty string = all sources
                    }
                    String sound;
                    if ((flags & 0x02) != 0) {
                        sound = BackwardsMappings.soundMappings.getNewId(wrapper.read(Type.STRING));
                        if (sound == null) {
                            sound = ""; // unmappable sound -> stop all
                        }
                    } else {
                        sound = "";
                    }
                    wrapper.write(Type.STRING, source);
                    wrapper.write(Type.STRING, sound);
                });
            }
        });
        // Numeric Sound Effect: remap the id, cancelling packets with no 1.12 equivalent.
        protocol.registerOutgoing(ClientboundPackets1_13.SOUND, new PacketRemapper() {
            @Override
            public void registerMap() {
                map(Type.VAR_INT); // sound id
                handler(wrapper -> {
                    int newSound = wrapper.get(Type.VAR_INT, 0);
                    int oldSound = BackwardsMappings.soundMappings.getNewId(newSound);
                    if (oldSound == -1) {
                        wrapper.cancel(); // no 1.12 counterpart for this sound
                    } else {
                        wrapper.set(Type.VAR_INT, 0, oldSound);
                    }
                });
            }
        });
    }
}
|
fabriziogiudici/bluemarine2-src
|
modules/MusicBrainz/src/main/java/it/tidalwave/bluemarine2/metadata/impl/audio/musicbrainz/MusicBrainzAudioMedatataImporter.java
|
/*
* *********************************************************************************************************************
*
* blueMarine II: Semantic Media Centre
* http://tidalwave.it/projects/bluemarine2
*
* Copyright (C) 2015 - 2021 by Tidalwave s.a.s. (http://tidalwave.it)
*
* *********************************************************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* *********************************************************************************************************************
*
* git clone https://bitbucket.org/tidalwave/bluemarine2-src
* git clone https://github.com/tidalwave-it/bluemarine2-src
*
* *********************************************************************************************************************
*/
package it.tidalwave.bluemarine2.metadata.impl.audio.musicbrainz;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Stream;
import java.io.IOException;
import java.math.BigInteger;
import javax.xml.namespace.QName;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.vocabulary.DC;
import org.eclipse.rdf4j.model.vocabulary.FOAF;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.eclipse.rdf4j.model.vocabulary.RDFS;
import org.musicbrainz.ns.mmd_2.Artist;
import org.musicbrainz.ns.mmd_2.DefTrackData;
import org.musicbrainz.ns.mmd_2.Disc;
import org.musicbrainz.ns.mmd_2.Medium;
import org.musicbrainz.ns.mmd_2.MediumList;
import org.musicbrainz.ns.mmd_2.Offset;
import org.musicbrainz.ns.mmd_2.Recording;
import org.musicbrainz.ns.mmd_2.Relation;
import org.musicbrainz.ns.mmd_2.Relation.AttributeList.Attribute;
import org.musicbrainz.ns.mmd_2.RelationList;
import org.musicbrainz.ns.mmd_2.Release;
import org.musicbrainz.ns.mmd_2.ReleaseGroup;
import org.musicbrainz.ns.mmd_2.ReleaseGroupList;
import org.musicbrainz.ns.mmd_2.ReleaseList;
import it.tidalwave.util.Id;
import it.tidalwave.bluemarine2.util.ModelBuilder;
import it.tidalwave.bluemarine2.model.MediaItem;
import it.tidalwave.bluemarine2.model.MediaItem.Metadata;
import it.tidalwave.bluemarine2.model.vocabulary.*;
import it.tidalwave.bluemarine2.metadata.cddb.CddbAlbum;
import it.tidalwave.bluemarine2.metadata.cddb.CddbMetadataProvider;
import it.tidalwave.bluemarine2.metadata.musicbrainz.MusicBrainzMetadataProvider;
import it.tidalwave.bluemarine2.rest.RestResponse;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import lombok.With;
import lombok.extern.slf4j.Slf4j;
import static java.util.Collections.*;
import static java.util.Comparator.*;
import static java.util.Map.entry;
import static java.util.stream.Collectors.*;
import static it.tidalwave.util.FunctionalCheckedExceptionWrappers.*;
import static it.tidalwave.bluemarine2.util.RdfUtilities.*;
import static it.tidalwave.bluemarine2.model.MediaItem.Metadata.*;
import static it.tidalwave.bluemarine2.metadata.musicbrainz.MusicBrainzMetadataProvider.*;
import static lombok.AccessLevel.PRIVATE;
/***********************************************************************************************************************
*
* @author <NAME>
*
**********************************************************************************************************************/
@Slf4j
@RequiredArgsConstructor
public class MusicBrainzAudioMedatataImporter
{
enum Validation
{
TRACK_OFFSETS_MATCH_REQUIRED,
TRACK_OFFSETS_MATCH_NOT_REQUIRED
}
    /** XML attribute carrying the MusicBrainz search score in REST responses. */
    private static final QName QNAME_SCORE = new QName("http://musicbrainz.org/ns/ext#-2.0", "score");

    private static final ValueFactory FACTORY = SimpleValueFactory.getInstance();

    /** Sub-resources to inline when querying MusicBrainz by TOC, release or recording. */
    private static final String[] TOC_INCLUDES = { "aliases", "artist-credits", "labels", "recordings" };
    private static final String[] RELEASE_INCLUDES = { "aliases", "artist-credits", "discids", "labels", "recordings" };
    private static final String[] RECORDING_INCLUDES = { "aliases", "artist-credits", "artist-rels" };

    /** IRI used to tag imported triples with their MusicBrainz provenance. */
    private static final IRI SOURCE_MUSICBRAINZ = FACTORY.createIRI(BMMO.NS, "source#musicbrainz");

    @Nonnull
    private final CddbMetadataProvider cddbMetadataProvider;

    @Nonnull
    private final MusicBrainzMetadataProvider mbMetadataProvider;

    /** Maximum per-track frame-offset discrepancy tolerated when matching offsets. */
    @Getter @Setter
    private int trackOffsetsMatchThreshold = 2500;

    /** Minimum MusicBrainz score for a release group to be considered. */
    @Getter @Setter
    private int releaseGroupScoreThreshold = 50;

    /** If {@code true}, in case of multiple collections to pick from, those that are not the least one are marked as
        alternative. */
    @Getter @Setter
    private boolean discourageCollections = true;

    /** TOCs already processed in this session, used to skip duplicate queries. */
    private final Set<String> processedTocs = new HashSet<>();

    /** Maps MusicBrainz relation names (optionally qualified by instrument/vocal attribute)
        to the performer-role predicates of the blueMarine/Music Ontology vocabularies. */
    private static final Map<String, IRI> PERFORMER_MAP = Map.ofEntries(
            entry("arranger", BMMO.P_ARRANGER),
            entry("balance", BMMO.P_BALANCE),
            entry("chorus master", BMMO.P_CHORUS_MASTER),
            entry("conductor", MO.P_CONDUCTOR),
            entry("editor", BMMO.P_EDITOR),
            entry("engineer", MO.P_ENGINEER),
            entry("instrument arranger", BMMO.P_ARRANGER),
            entry("mastering", BMMO.P_MASTERING),
            entry("mix", BMMO.P_MIX),
            entry("orchestrator", BMMO.P_ORCHESTRATOR),
            entry("performer", MO.P_PERFORMER),
            entry("performing orchestra", BMMO.P_ORCHESTRA),
            entry("producer", MO.P_PRODUCER),
            entry("programming", BMMO.P_PROGRAMMING),
            entry("recording", BMMO.P_RECORDING),
            entry("remixer", BMMO.P_MIX),
            entry("sound", MO.P_ENGINEER),
            entry("vocal", MO.P_SINGER),
            entry("vocal/additional", BMMO.P_BACKGROUND_SINGER),
            entry("vocal/alto vocals", BMMO.P_ALTO),
            entry("vocal/background vocals", BMMO.P_BACKGROUND_SINGER),
            entry("vocal/baritone vocals", BMMO.P_BARITONE),
            entry("vocal/bass-baritone vocals", BMMO.P_BASS_BARITONE),
            entry("vocal/bass vocals", BMMO.P_BASS),
            entry("vocal/choir vocals", BMMO.P_CHOIR),
            entry("vocal/contralto vocals", BMMO.P_CONTRALTO),
            entry("vocal/guest", MO.P_SINGER),
            entry("vocal/lead vocals", BMMO.P_LEAD_SINGER),
            entry("vocal/mezzo-soprano vocals", BMMO.P_MEZZO_SOPRANO),
            entry("vocal/other vocals", BMMO.P_BACKGROUND_SINGER),
            entry("vocal/solo", BMMO.P_LEAD_SINGER),
            entry("vocal/soprano vocals", BMMO.P_SOPRANO),
            entry("vocal/spoken vocals", MO.P_SINGER),
            entry("vocal/tenor vocals", BMMO.P_TENOR),
            entry("instrument", MO.P_PERFORMER),
            entry("instrument/accordion", BMMO.P_PERFORMER_ACCORDION),
            entry("instrument/acoustic guitar", BMMO.P_PERFORMER_ACOUSTIC_GUITAR),
            entry("instrument/acoustic bass guitar", BMMO.P_PERFORMER_ACOUSTIC_BASS_GUITAR),
            entry("instrument/agogô", BMMO.P_PERFORMER_AGOGO),
            entry("instrument/alto saxophone", BMMO.P_PERFORMER_ALTO_SAX),
            entry("instrument/banjo", BMMO.P_PERFORMER_BANJO),
            entry("instrument/baritone guitar", BMMO.P_PERFORMER_BARITONE_GUITAR),
            entry("instrument/baritone saxophone", BMMO.P_PERFORMER_BARITONE_SAX),
            entry("instrument/bass", BMMO.P_PERFORMER_BASS),
            entry("instrument/bass clarinet", BMMO.P_PERFORMER_BASS_CLARINET),
            entry("instrument/bass drum", BMMO.P_PERFORMER_BASS_DRUM),
            entry("instrument/bass guitar", BMMO.P_PERFORMER_BASS_GUITAR),
            entry("instrument/bass trombone", BMMO.P_PERFORMER_BASS_TROMBONE),
            entry("instrument/bassoon", BMMO.P_PERFORMER_BASSOON),
            entry("instrument/bells", BMMO.P_PERFORMER_BELLS),
            entry("instrument/berimbau", BMMO.P_PERFORMER_BERIMBAU),
            entry("instrument/brass", BMMO.P_PERFORMER_BRASS),
            entry("instrument/brushes", BMMO.P_PERFORMER_BRUSHES),
            entry("instrument/cello", BMMO.P_PERFORMER_CELLO),
            entry("instrument/clarinet", BMMO.P_PERFORMER_CLARINET),
            entry("instrument/classical guitar", BMMO.P_PERFORMER_CLASSICAL_GUITAR),
            entry("instrument/congas", BMMO.P_PERFORMER_CONGAS),
            entry("instrument/cornet", BMMO.P_PERFORMER_CORNET),
            entry("instrument/cymbals", BMMO.P_PERFORMER_CYMBALS),
            entry("instrument/double bass", BMMO.P_PERFORMER_DOUBLE_BASS),
            entry("instrument/drums", BMMO.P_PERFORMER_DRUMS),
            entry("instrument/drum machine", BMMO.P_PERFORMER_DRUM_MACHINE),
            entry("instrument/electric bass guitar", BMMO.P_PERFORMER_ELECTRIC_BASS_GUITAR),
            entry("instrument/electric guitar", BMMO.P_PERFORMER_ELECTRIC_GUITAR),
            entry("instrument/electric piano", BMMO.P_PERFORMER_ELECTRIC_PIANO),
            entry("instrument/electric sitar", BMMO.P_PERFORMER_ELECTRIC_SITAR),
            entry("instrument/electronic drum set", BMMO.P_PERFORMER_ELECTRONIC_DRUM_SET),
            entry("instrument/english horn", BMMO.P_PERFORMER_ENGLISH_HORN),
            entry("instrument/flugelhorn", BMMO.P_PERFORMER_FLUGELHORN),
            entry("instrument/flute", BMMO.P_PERFORMER_FLUTE),
            entry("instrument/frame drum", BMMO.P_PERFORMER_FRAME_DRUM),
            entry("instrument/french horn", BMMO.P_PERFORMER_FRENCH_HORN),
            entry("instrument/glockenspiel", BMMO.P_PERFORMER_GLOCKENSPIEL),
            entry("instrument/grand piano", BMMO.P_PERFORMER_GRAND_PIANO),
            entry("instrument/guest", BMMO.P_PERFORMER_GUEST),
            entry("instrument/guitar", BMMO.P_PERFORMER_GUITAR),
            entry("instrument/guitar synthesizer", BMMO.P_PERFORMER_GUITAR_SYNTHESIZER),
            entry("instrument/guitars", BMMO.P_PERFORMER_GUITARS),
            entry("instrument/handclaps", BMMO.P_PERFORMER_HANDCLAPS),
            entry("instrument/hammond organ", BMMO.P_PERFORMER_HAMMOND_ORGAN),
            entry("instrument/harmonica", BMMO.P_PERFORMER_HARMONICA),
            entry("instrument/harp", BMMO.P_PERFORMER_HARP),
            entry("instrument/harpsichord", BMMO.P_PERFORMER_HARPSICHORD),
            entry("instrument/hi-hat", BMMO.P_PERFORMER_HIHAT),
            entry("instrument/horn", BMMO.P_PERFORMER_HORN),
            entry("instrument/keyboard", BMMO.P_PERFORMER_KEYBOARD),
            entry("instrument/koto", BMMO.P_PERFORMER_KOTO),
            entry("instrument/lute", BMMO.P_PERFORMER_LUTE),
            entry("instrument/maracas", BMMO.P_PERFORMER_MARACAS),
            entry("instrument/marimba", BMMO.P_PERFORMER_MARIMBA),
            entry("instrument/mellophone", BMMO.P_PERFORMER_MELLOPHONE),
            entry("instrument/melodica", BMMO.P_PERFORMER_MELODICA),
            entry("instrument/oboe", BMMO.P_PERFORMER_OBOE),
            entry("instrument/organ", BMMO.P_PERFORMER_ORGAN),
            entry("instrument/other instruments", BMMO.P_PERFORMER_OTHER_INSTRUMENTS),
            entry("instrument/percussion", BMMO.P_PERFORMER_PERCUSSION),
            entry("instrument/piano", BMMO.P_PERFORMER_PIANO),
            entry("instrument/piccolo trumpet", BMMO.P_PERFORMER_PICCOLO_TRUMPET),
            entry("instrument/pipe organ", BMMO.P_PERFORMER_PIPE_ORGAN),
            entry("instrument/psaltery", BMMO.P_PERFORMER_PSALTERY),
            entry("instrument/recorder", BMMO.P_PERFORMER_RECORDER),
            entry("instrument/reeds", BMMO.P_PERFORMER_REEDS),
            entry("instrument/rhodes piano", BMMO.P_PERFORMER_RHODES_PIANO),
            entry("instrument/santur", BMMO.P_PERFORMER_SANTUR),
            entry("instrument/saxophone", BMMO.P_PERFORMER_SAXOPHONE),
            entry("instrument/shakers", BMMO.P_PERFORMER_SHAKERS),
            entry("instrument/sitar", BMMO.P_PERFORMER_SITAR),
            entry("instrument/slide guitar", BMMO.P_PERFORMER_SLIDE_GUITAR),
            entry("instrument/snare drum", BMMO.P_PERFORMER_SNARE_DRUM),
            entry("instrument/solo", BMMO.P_PERFORMER_SOLO),
            entry("instrument/soprano saxophone", BMMO.P_PERFORMER_SOPRANO_SAX),
            entry("instrument/spanish acoustic guitar", BMMO.P_PERFORMER_SPANISH_ACOUSTIC_GUITAR),
            entry("instrument/steel guitar", BMMO.P_PERFORMER_STEEL_GUITAR),
            entry("instrument/synclavier", BMMO.P_PERFORMER_SYNCLAVIER),
            entry("instrument/synthesizer", BMMO.P_PERFORMER_SYNTHESIZER),
            entry("instrument/tambourine", BMMO.P_PERFORMER_TAMBOURINE),
            entry("instrument/tenor saxophone", BMMO.P_PERFORMER_TENOR_SAX),
            entry("instrument/timbales", BMMO.P_PERFORMER_TIMBALES),
            entry("instrument/timpani", BMMO.P_PERFORMER_TIMPANI),
            entry("instrument/tiple", BMMO.P_PERFORMER_TIPLE),
            entry("instrument/trombone", BMMO.P_PERFORMER_TROMBONE),
            entry("instrument/trumpet", BMMO.P_PERFORMER_TRUMPET),
            entry("instrument/tuba", BMMO.P_PERFORMER_TUBA),
            entry("instrument/tubular bells", BMMO.P_PERFORMER_TUBULAR_BELLS),
            entry("instrument/tuned percussion", BMMO.P_PERFORMER_TUNED_PERCUSSION),
            entry("instrument/ukulele", BMMO.P_PERFORMER_UKULELE),
            entry("instrument/vibraphone", BMMO.P_PERFORMER_VIBRAPHONE),
            entry("instrument/viola", BMMO.P_PERFORMER_VIOLA),
            entry("instrument/viola da gamba", BMMO.P_PERFORMER_VIOLA_DA_GAMBA),
            entry("instrument/violin", BMMO.P_PERFORMER_VIOLIN),
            entry("instrument/whistle", BMMO.P_PERFORMER_WHISTLE),
            entry("instrument/xylophone", BMMO.P_PERFORMER_XYLOPHONE));
    /*******************************************************************************************************************
     *
     * Aggregate of a {@link Release}, a {@link Medium} inside that {@code Release} and a {@link Disc} inside that
     * {@code Medium}. Identity is defined by the (release id, disc id) pair — see {@link #computeId()}.
     *
     ******************************************************************************************************************/
    @RequiredArgsConstructor @AllArgsConstructor @Getter
    static class ReleaseMediumDisk
      {
        @Nonnull
        private final Release release;

        @Nonnull
        private final Medium medium;

        /** The disc matched inside the medium (attached via the Lombok-generated {@code withDisc()}). */
        @With
        private Disc disc;

        /** Marks entries that should be de-prioritised, e.g. disk collections. */
        @With
        private boolean alternative;

        // Album title found in the audio file's embedded metadata.
        private String embeddedTitle;

        // Similarity score between the picked title and the embedded title.
        private int score;

        /***************************************************************************************************************
         * Returns a copy carrying the given embedded title and the similarity score derived from it.
         **************************************************************************************************************/
        @Nonnull
        public ReleaseMediumDisk withEmbeddedTitle (@Nonnull final String embeddedTitle)
          {
            return new ReleaseMediumDisk(release, medium, disc, alternative, embeddedTitle,
                                         similarity(pickTitle(), embeddedTitle));
          }

        /***************************************************************************************************************
         *
         * Prefer Medium title - typically available in case of disk collections, in which case Release has got
         * the collection title, which is very generic.
         *
         **************************************************************************************************************/
        @Nonnull
        public String pickTitle()
          {
            return Optional.ofNullable(medium.getTitle()).orElse(release.getTitle());
          }

        /***************************************************************************************************************
         * Returns a copy marked as alternative when the given condition holds (never clears the flag).
         **************************************************************************************************************/
        @Nonnull
        public ReleaseMediumDisk alternativeIf (final boolean condition)
          {
            return withAlternative(alternative || condition);
          }

        /***************************************************************************************************************
         * Stable identity of this aggregate: a SHA1 of the release id and the disc id.
         **************************************************************************************************************/
        @Nonnull
        public Id computeId()
          {
            return createSha1IdNew(getRelease().getId() + "+" + getDisc().getId());
          }

        /***************************************************************************************************************
         * The number of disks in the release, when declared.
         **************************************************************************************************************/
        @Nonnull
        public Optional<Integer> getDiskCount()
          {
            return Optional.ofNullable(release.getMediumList()).map(MediumList::getCount).map(BigInteger::intValue);
          }

        /***************************************************************************************************************
         * The 1-based position of this medium within the release, when declared.
         **************************************************************************************************************/
        @Nonnull
        public Optional<Integer> getDiskNumber()
          {
            return Optional.ofNullable(medium.getPosition()).map(BigInteger::intValue);
          }

        /***************************************************************************************************************
         * The Amazon ASIN of the release, if present.
         **************************************************************************************************************/
        @Nonnull
        public Optional<String> getAsin()
          {
            return Optional.ofNullable(release.getAsin());
          }

        /***************************************************************************************************************
         * The barcode of the release, if present.
         **************************************************************************************************************/
        @Nonnull
        public Optional<String> getBarcode()
          {
            return Optional.ofNullable(release.getBarcode());
          }

        /***************************************************************************************************************
         * Builds a CDDB view of this disc from its track frame offsets.
         **************************************************************************************************************/
        @Nonnull
        public Cddb getCddb()
          {
            return MediaItem.Metadata.Cddb.builder()
                                          .discId("") // FIXME
                                          .trackFrameOffsets(disc.getOffsetList().getOffset()
                                                                 .stream()
                                                                 .map(Offset::getValue)
                                                                 .mapToInt(BigInteger::intValue)
                                                                 .toArray())
                                          .build();
          }

        /***************************************************************************************************************
         * A compact "mediumTitle/discId" string for logging.
         **************************************************************************************************************/
        @Nonnull
        public String getMediumAndDiscString()
          {
            return String.format("%s/%s", medium.getTitle(), (disc != null) ? disc.getId() : "null");
          }

        /***************************************************************************************************************
         * Equality is based solely on {@link #computeId()} (release id + disc id).
         **************************************************************************************************************/
        @Override
        public boolean equals (@Nullable final Object other)
          {
            if (this == other)
              {
                return true;
              }

            if ((other == null) || (getClass() != other.getClass()))
              {
                return false;
              }

            return Objects.equals(this.computeId(), ((ReleaseMediumDisk)other).computeId());
          }

        /***************************************************************************************************************
         * Consistent with {@link #equals(Object)}: hashes the computed id.
         **************************************************************************************************************/
        @Override
        public int hashCode()
          {
            return computeId().hashCode();
          }

        /***************************************************************************************************************
         * Verbose diagnostic rendering used by the matching logs.
         **************************************************************************************************************/
        @Override @Nonnull
        public String toString()
          {
            return String.format("ALT: %-5s %s ASIN: %-10s BARCODE: %-13s SCORE: %4d #: %3s/%3s " +
                                 "TITLES: PICKED: %s EMBEDDED: %s RELEASE: %s MEDIUM: %s",
                                 alternative,
                                 release.getId(),
                                 release.getAsin(),
                                 release.getBarcode(),
                                 getScore(),
                                 getDiskNumber().map(Number::toString).orElse(""),
                                 getDiskCount().map(Number::toString).orElse(""),
                                 pickTitle(), embeddedTitle, release.getTitle(), medium.getTitle());
          }
      }
/*******************************************************************************************************************
*
* Aggregate of a {@link Relation} and a target type.
*
******************************************************************************************************************/
@RequiredArgsConstructor(access = PRIVATE) @Getter
static class RelationAndTargetType
  {
    // The MusicBrainz relation being wrapped.
    @Nonnull
    private final Relation relation;

    // The target type of the relation list the relation was found in (e.g. "artist").
    @Nonnull
    private final String targetType;

    /***********************************************************************************************************
     *
     * Flattens a {@link RelationList} into a stream of pairs, each relation tagged with the list's target type.
     *
     **********************************************************************************************************/
    @Nonnull
    public static Stream<RelationAndTargetType> toStream (@Nonnull final RelationList relationList)
      {
        final String targetType = relationList.getTargetType();
        return relationList.getRelation()
                           .stream()
                           .map(relation -> new RelationAndTargetType(relation, targetType));
      }
  }
/*******************************************************************************************************************
*
* Downloads and imports MusicBrainz data for the given {@link Metadata}.
*
* @param metadata the {@code Metadata}
* @return the RDF triples
* @throws InterruptedException in case of I/O error
* @throws IOException in case of I/O error
*
******************************************************************************************************************/
@Nonnull
public Optional<Model> handleMetadata (@Nonnull final Metadata metadata)
  throws InterruptedException, IOException
  {
    final ModelBuilder model = createModelBuilder();
    final Optional<String> optionalAlbumTitle = metadata.get(ALBUM);
    final Optional<Cddb> optionalCddb = metadata.get(CDDB);

    // Only records carrying both a non-blank album title and a CDDB TOC can be queried.
    if (optionalAlbumTitle.isPresent() && !optionalAlbumTitle.get().trim().isEmpty() && optionalCddb.isPresent())
      {
        final String albumTitle = optionalAlbumTitle.get();
        final Cddb cddb = optionalCddb.get();
        final String toc = cddb.getToc();

        // Each TOC is processed at most once; the set is shared, so guard against concurrent callers.
        synchronized (processedTocs)
          {
            if (processedTocs.contains(toc))
              {
                // Already handled (or being handled) elsewhere: emit nothing for this record.
                return Optional.empty();
              }

            processedTocs.add(toc);
          }

        log.info("QUERYING MUSICBRAINZ FOR TOC OF: {}", albumTitle);
        final List<ReleaseMediumDisk> rmds = new ArrayList<>();
        final RestResponse<ReleaseList> releaseList = mbMetadataProvider.findReleaseListByToc(toc, TOC_INCLUDES);
        // even though we're querying by TOC, matching offsets is required to kill many false results
        releaseList.ifPresent(releases -> rmds.addAll(findReleases(releases, cddb, Validation.TRACK_OFFSETS_MATCH_REQUIRED)));

        // Fallback: when the TOC query yields nothing, search by the embedded album title and, when
        // available, by the alternate title retrieved from CDDB.
        if (rmds.isEmpty())
          {
            log.info("TOC NOT FOUND, QUERYING MUSICBRAINZ FOR TITLE: {}", albumTitle);
            final List<ReleaseGroup> releaseGroups = new ArrayList<>();
            releaseGroups.addAll(mbMetadataProvider.findReleaseGroupByTitle(albumTitle)
                                                   .map(ReleaseGroupList::getReleaseGroup)
                                                   .orElse(emptyList()));

            final Optional<String> alternateTitle = cddbAlternateTitleOf(metadata);
            alternateTitle.ifPresent(t -> log.info("ALSO USING ALTERNATE TITLE: {}", t));
            releaseGroups.addAll(alternateTitle.map(_f(mbMetadataProvider::findReleaseGroupByTitle))
                                               .map(response -> response.get().getReleaseGroup())
                                               .orElse(emptyList()));

            rmds.addAll(findReleases(releaseGroups, cddb, Validation.TRACK_OFFSETS_MATCH_REQUIRED));
          }

        // Mark all-but-one result as "alternative", then import every result (in parallel) into the model.
        model.with(markedAlternative(rmds, albumTitle).stream()
                                                      .parallel()
                                                      .map(_f(rmd -> handleRelease(metadata, rmd)))
                                                      .collect(toList()));
      }

    // NOTE: when title or TOC are missing, this returns an empty (but present) model.
    return Optional.of(model.toModel());
  }
/*******************************************************************************************************************
*
* Given a valid list of {@link ReleaseMediumDisk}s - that is, that has been already validated and correctly matches
* the searched record - if it contains more than one element picks the most suitable one. Unwanted elements are
* not filtered out, because it's not always possible to automatically pick the best one: in fact, some entries
* might differ for ASIN or barcode; or might be items individually sold or part of a collection. It makes sense to
* offer the user the possibility of manually pick them later. So, instead of being filtered out, those elements
* are marked as "alternative" (and they will be later marked as such in the triple store).
*
* These are the performed steps:
*
* <ol>
* <li>Eventual duplicates are collapsed.</li>
* <li>If required, in case of members of collections, collections that are larger than the least are marked as
* alternative.</li>
* <li>A matching score is computed about the affinity of the title found in MusicBrainz metadata with respect
* to the title in the embedded metadata: elements that don't reach the maximum score are marked as alternative.
* </li>
* <li>If at least one element has got an ASIN, other elements that don't bear it are marked as alternative.</li>
* <li>If at least one element has got a barcode, other elements that don't bear it are marked as alternative.</li>
* <li>If the pick is not unique yet, an ASIN is picked as the first in lexicographic order and elements not
* bearing it are marked as alternative.</li>
* <li>If the pick is not unique yet, a barcode is picked as the first in lexicographic order and elements not
* bearing it are marked as alternative.</li>
* <li>If the pick is not unique yet, elements other than the first one are marked as alternative.</i>
* </ol>
*
* The last criteria are implemented for giving consistency to automated tests, considering that the order in which
* elements are found is not guaranteed because of multi-threading.
*
* @param inRmds the incoming {@code ReleaseAndMedium}s
* @param embeddedTitle the album title found in the file
* @return the processed {@code ReleaseAndMedium}s
*
******************************************************************************************************************/
@Nonnull
private List<ReleaseMediumDisk> markedAlternative (@Nonnull final List<ReleaseMediumDisk> inRmds,
                                                   @Nonnull final String embeddedTitle)
  {
    // A single result needs no disambiguation.
    if (inRmds.size() <= 1)
      {
        return inRmds;
      }

    // Attach the embedded title (used for affinity scoring) and collapse duplicates.
    List<ReleaseMediumDisk> result = inRmds.stream()
                                           .map(rmd -> rmd.withEmbeddedTitle(embeddedTitle))
                                           .distinct()
                                           .collect(toList());

    // Apply the disambiguation steps in order, each narrowing down the not-alternative set.
    if (discourageCollections)
      {
        result = markedAlternativeIfNotLeastCollection(result);
      }

    result = markedAlternativeByTitleAffinity(result);
    result = markedAlternativeByAsinOrBarcode(result);
    result = markedAlternativeButTheFirstNotAlternative(result);

    synchronized (log) // keep log lines together
      {
        log.info("MULTIPLE RESULTS");
        result.forEach(rmd -> log.info(">>> MULTIPLE RESULTS: {}", rmd));
      }

    final int count = countOfNotAlternative(result);
    assert count == 1 : "Still too many items not alternative: " + count;
    return result;
  }
/*******************************************************************************************************************
*
* @param rmds the incoming {@code ReleaseMediumDisk}
* @return the processed {@code ReleaseMediumDisk}
*
******************************************************************************************************************/
@Nonnull
private static List<ReleaseMediumDisk> markedAlternativeByAsinOrBarcode (@Nonnull List<ReleaseMediumDisk> rmds)
  {
    // Step 1: if any not-alternative item bears an ASIN, items without one become alternative.
    final boolean anyAsin = rmds.stream().anyMatch(rmd -> !rmd.isAlternative() && rmd.getAsin().isPresent());
    rmds = markedAlternative(rmds, rmd -> anyAsin && rmd.getAsin().isEmpty());

    // Step 2: same criterion, applied to barcodes.
    final boolean anyBarcode = rmds.stream().anyMatch(rmd -> !rmd.isAlternative() && rmd.getBarcode().isPresent());
    rmds = markedAlternative(rmds, rmd -> anyBarcode && rmd.getBarcode().isEmpty());

    // Step 3: if still ambiguous, pick the lexicographically first ASIN and drop the others.
    if (anyAsin && (countOfNotAlternative(rmds) > 1))
      {
        final Optional<String> pickedAsin = findFirstNotInAlternative(rmds, ReleaseMediumDisk::getAsin);
        rmds = markedAlternative(rmds, rmd -> !pickedAsin.equals(rmd.getAsin()));
      }

    // Step 4: if still ambiguous, do the same with the barcode.
    if (anyBarcode && (countOfNotAlternative(rmds) > 1))
      {
        final Optional<String> pickedBarcode = findFirstNotInAlternative(rmds, ReleaseMediumDisk::getBarcode);
        rmds = markedAlternative(rmds, rmd -> !pickedBarcode.equals(rmd.getBarcode()));
      }

    return rmds;
  }
/*******************************************************************************************************************
*
* Sweeps the given {@link ReleaseMediumDisk}s and marks as alternative all the items after a not alternative item.
*
* @param rmds the incoming {@code ReleaseMediumDisk}
* @return the processed {@code ReleaseMediumDisk}
*
******************************************************************************************************************/
@Nonnull
private static List<ReleaseMediumDisk> markedAlternativeButTheFirstNotAlternative (@Nonnull final List<ReleaseMediumDisk> rmds)
  {
    // Nothing to do if the pick is already unique.
    if (countOfNotAlternative(rmds) <= 1)
      {
        return rmds;
      }

    // Deterministic pick: the not-alternative item whose release id sorts first (fix for BMT-166).
    final ReleaseMediumDisk pick = rmds.stream()
                                       .filter(rmd -> !rmd.isAlternative())
                                       .min(comparing(rmd -> rmd.getRelease().getId()))
                                       .get();
    return markedAlternative(rmds, rmd -> rmd != pick);
  }
/*******************************************************************************************************************
*
* Sweeps the given {@link ReleaseMediumDisk}s and marks as alternative all the items which are not part of the
* disk collections with the minimum size.
*
* @param rmds the incoming {@code ReleaseMediumDisk}s
* @return the processed {@code ReleaseMediumDisk}s
*
******************************************************************************************************************/
@Nonnull
private static List<ReleaseMediumDisk> markedAlternativeIfNotLeastCollection (@Nonnull final List<ReleaseMediumDisk> rmds)
  {
    // Smallest disk count among not-alternative items. The previous implementation called
    // .min().getAsInt(), which threw NoSuchElementException when every item was already marked as
    // alternative; with the MAX_VALUE sentinel the predicate below is false for everything and the
    // list is returned unchanged.
    final int leastSize = rmds.stream().filter(rmd -> !rmd.isAlternative())
                                       .mapToInt(rmd -> rmd.getDiskCount().orElse(1))
                                       .min().orElse(Integer.MAX_VALUE);
    // Items belonging to collections larger than the least one are marked as alternative.
    return markedAlternative(rmds, rmd -> rmd.getDiskCount().orElse(1) > leastSize);
  }
/*******************************************************************************************************************
*
* Sweeps the given {@link ReleaseMediumDisk}s and marks as alternative the items without the best score.
*
* @param rmds the incoming {@code ReleaseMediumDisk}
* @return the processed {@code ReleaseMediumDisk}
*
******************************************************************************************************************/
@Nonnull
private static List<ReleaseMediumDisk> markedAlternativeByTitleAffinity (@Nonnull final List<ReleaseMediumDisk> rmds)
  {
    // Best affinity score among not-alternative items. The previous implementation called
    // .max().getAsInt(), which threw NoSuchElementException when every item was already marked as
    // alternative; with the MIN_VALUE sentinel no score can be below it and the list is returned
    // unchanged.
    final int bestScore = rmds.stream().filter(rmd -> !rmd.isAlternative())
                                       .mapToInt(ReleaseMediumDisk::getScore)
                                       .max().orElse(Integer.MIN_VALUE);
    // Items that don't reach the best score are marked as alternative.
    return markedAlternative(rmds, rmd -> rmd.getScore() < bestScore);
  }
/*******************************************************************************************************************
*
* Creates a copy of the collection where items have been marked alternative if the given predicate applies.
*
* @param rmds the source
* @param predicate the predicate to decide whether an item must be marked as alternative
* @return the processed collection
*
******************************************************************************************************************/
@Nonnull
private static List<ReleaseMediumDisk> markedAlternative (@Nonnull final List<ReleaseMediumDisk> rmds,
                                                          @Nonnull final Predicate<ReleaseMediumDisk> predicate)
  {
    // Copy-on-write: each element is replaced by a copy whose alternative flag reflects the predicate.
    final List<ReleaseMediumDisk> result = new ArrayList<>();

    for (final ReleaseMediumDisk rmd : rmds)
      {
        result.add(rmd.alternativeIf(predicate.test(rmd)));
      }

    return result;
  }
/*******************************************************************************************************************
*
* Finds the first attribute specified by an extractor among items not already marked as alternatives.
*
* @param rmds the collection to search into
* @param extractor the extractor
* @return the searched object
*
******************************************************************************************************************/
@Nonnull
private static <T extends Comparable<?>> Optional<T> findFirstNotInAlternative (
        @Nonnull final List<ReleaseMediumDisk> rmds,
        @Nonnull final Function<ReleaseMediumDisk, Optional<T>> extractor)
  {
    // Extract the attribute from not-alternative items, drop the empty ones, and take the value that
    // sorts first — the ordering makes the result deterministic regardless of thread scheduling.
    return rmds.stream()
               .filter(rmd -> !rmd.isAlternative())
               .map(extractor)
               .filter(Optional::isPresent)
               .map(Optional::get)
               .sorted()
               .findFirst();
  }
/*******************************************************************************************************************
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Counts the items not (yet) marked as alternative.
 *
 ******************************************************************************************************************/
@Nonnegative
private static int countOfNotAlternative (@Nonnull final List<ReleaseMediumDisk> rmds)
  {
    int count = 0;

    for (final ReleaseMediumDisk rmd : rmds)
      {
        if (!rmd.isAlternative())
          {
            count++;
          }
      }

    return count;
  }
/*******************************************************************************************************************
*
* Extracts data from the given release. For MusicBrainz, a Release is typically a disk, but it can be made of
* multiple disks in case of many tracks.
*
* @param metadata the {@code Metadata}
* @param rmd the release
* @return the RDF triples
* @throws InterruptedException in case of I/O error
* @throws IOException in case of I/O error
*
******************************************************************************************************************/
@Nonnull
private ModelBuilder handleRelease (@Nonnull final Metadata metadata, @Nonnull final ReleaseMediumDisk rmd)
  throws IOException, InterruptedException
  {
    final Medium medium = rmd.getMedium();
    final String releaseId = rmd.getRelease().getId();
    final List<DefTrackData> tracks = medium.getTrackList().getDefTrack();
    // NOTE(review): the unconditional .get() assumes ALBUM and CDDB are present — true when invoked
    // from handleMetadata(), which checks both; confirm if other callers appear.
    final String embeddedRecordTitle = metadata.get(ALBUM).get(); // .orElse(parent.getPath().toFile().getName());
    final Cddb cddb = metadata.get(CDDB).get();
    final String recordTitle = rmd.pickTitle();
    // Two IRIs: one for the record as described by the embedded metadata, one for this MusicBrainz match.
    final IRI embeddedRecordIri = recordIriOf(metadata, embeddedRecordTitle);
    final IRI recordIri = BMMO.recordIriFor(rmd.computeId());
    log.info("importing {} {} ...", recordTitle, (rmd.isAlternative() ? "(alternative)" : ""));

    // Emit the record-level statements, then append the triples of every track (processed in parallel).
    ModelBuilder model = createModelBuilder()
        .with(recordIri, RDF.TYPE, MO.C_RECORD)
        .with(recordIri, RDFS.LABEL, literalFor(recordTitle))
        .with(recordIri, DC.TITLE, literalFor(recordTitle))
        .with(recordIri, BMMO.P_IMPORTED_FROM, BMMO.O_SOURCE_MUSICBRAINZ)
        .with(recordIri, BMMO.P_ALTERNATE_OF, embeddedRecordIri)
        .with(recordIri, MO.P_MEDIA_TYPE, MO.C_CD)
        .with(recordIri, MO.P_TRACK_COUNT, literalFor(tracks.size()))
        .with(recordIri, MO.P_MUSICBRAINZ_GUID, literalFor(releaseId))
        .with(recordIri, MO.P_MUSICBRAINZ, musicBrainzIriFor("release", releaseId))
        .with(recordIri, MO.P_AMAZON_ASIN, literalFor(rmd.getAsin()))
        .with(recordIri, MO.P_GTIN, literalFor(rmd.getBarcode()))
        .with(tracks.stream().parallel()
                             .map(_f(track -> handleTrack(rmd, cddb, recordIri, track)))
                             .collect(toList()));

    // Alternative picks get an extra statement linking them to the embedded record.
    if (rmd.isAlternative())
      {
        model = model.with(recordIri, BMMO.P_ALTERNATE_PICK_OF, embeddedRecordIri);
      }

    return model;
    // TODO: release.getLabelInfoList();
    // TODO: record producer - requires inc=artist-rels
  }
/*******************************************************************************************************************
*
* Extracts data from the given {@link DefTrackData}.
*
* @param rmd the release
* @param cddb the CDDB of the track we're handling
* @param track the track
* @return the RDF triples
* @throws InterruptedException in case of I/O error
* @throws IOException in case of I/O error
*
******************************************************************************************************************/
@Nonnull
private ModelBuilder handleTrack (@Nonnull final ReleaseMediumDisk rmd,
                                  @Nonnull final Cddb cddb,
                                  @Nonnull final IRI recordIri,
                                  @Nonnull final DefTrackData track)
  throws IOException, InterruptedException
  {
    final IRI trackIri = trackIriOf(track.getId());
    final int trackNumber = track.getPosition().intValue();
    // A disk count of 1 is normalised to empty so single-disk records carry no disk statements.
    final Optional<Integer> diskCount = emptyIfOne(rmd.getDiskCount());
    final Optional<Integer> diskNumber = diskCount.flatMap(dc -> rmd.getDiskNumber());
    final String recordingId = track.getRecording().getId();
    // final Recording recording = track.getRecording();
    // Remote fetch: the inline Recording lacks the relation data, so it is re-retrieved with includes.
    final Recording recording = mbMetadataProvider.getResource(RECORDING, recordingId, RECORDING_INCLUDES).get();
    final String trackTitle = recording.getTitle();
    // track.getRecording().getAliasList().getAlias().get(0).getSortName();
    // The signal IRI is derived from the CDDB TOC + track position, tying it to the physical audio.
    final IRI signalIri = signalIriFor(cddb, track.getPosition().intValue());
    log.info(">>>>>>>> {}. {}", trackNumber, trackTitle);

    // Track-level statements, plus whatever the recording's relations contribute (performers etc.).
    return createModelBuilder()
        .with(recordIri, MO.P_TRACK, trackIri)
        .with(recordIri, BMMO.P_DISK_COUNT, literalForInt(diskCount))
        .with(recordIri, BMMO.P_DISK_NUMBER, literalForInt(diskNumber))
        .with(signalIri, MO.P_PUBLISHED_AS, trackIri)
        .with(trackIri, RDF.TYPE, MO.C_TRACK)
        .with(trackIri, RDFS.LABEL, literalFor(trackTitle))
        .with(trackIri, DC.TITLE, literalFor(trackTitle))
        .with(trackIri, BMMO.P_IMPORTED_FROM, BMMO.O_SOURCE_MUSICBRAINZ)
        .with(trackIri, MO.P_TRACK_NUMBER, literalFor(trackNumber))
        .with(trackIri, MO.P_MUSICBRAINZ_GUID, literalFor(track.getId()))
        .with(trackIri, MO.P_MUSICBRAINZ, musicBrainzIriFor("track", track.getId()))
        .with(handleTrackRelations(signalIri, trackIri, recordIri, recording));
  }
/*******************************************************************************************************************
*
* Extracts data from the relations of the given {@link Recording}.
*
* @param signalIri the IRI of the signal associated to the track we're handling
* @param recording the {@code Recording}
* @return the RDF triples
*
******************************************************************************************************************/
@Nonnull
private ModelBuilder handleTrackRelations (@Nonnull final IRI signalIri,
                                           @Nonnull final IRI trackIri,
                                           @Nonnull final IRI recordIri,
                                           @Nonnull final Recording recording)
  {
    // Flatten every relation list into (relation, targetType) pairs and collect the triples each
    // pair contributes; pairs are processed in parallel.
    final List<ModelBuilder> models = recording.getRelationList()
                                               .stream()
                                               .parallel()
                                               .flatMap(RelationAndTargetType::toStream)
                                               .map(ratt -> handleTrackRelation(signalIri, trackIri, recordIri, recording, ratt))
                                               .collect(toList());
    return createModelBuilder().with(models);
  }
/*******************************************************************************************************************
*
* Extracts data from a relation of the given {@link Recording}.
*
* @param signalIri the IRI of the signal associated to the track we're handling
* @param recording the {@code Recording}
* @param ratt the relation
* @return the RDF triples
*
******************************************************************************************************************/
@Nonnull
private ModelBuilder handleTrackRelation (@Nonnull final IRI signalIri,
                                          @Nonnull final IRI trackIri,
                                          @Nonnull final IRI recordIri,
                                          @Nonnull final Recording recording,
                                          @Nonnull final RelationAndTargetType ratt)
  {
    final Relation relation = ratt.getRelation();
    final String targetType = ratt.getTargetType();
    final List<Attribute> attributes = getAttributes(relation);
    // final Target target = relation.getTarget();
    final String type = relation.getType();
    final Artist artist = relation.getArtist();
    log.info(">>>>>>>>>>>> {} {} {} {} ({})", targetType,
                                              type,
                                              attributes.stream().map(a -> toString(a)).collect(toList()),
                                              artist.getName(),
                                              artist.getId());
    final IRI performanceIri = performanceIriFor(recording.getId());
    final IRI artistIri = artistIriOf(artist.getId());

    // Base statements: the performance recorded as the signal, plus the artist's identity triples.
    final ModelBuilder model = createModelBuilder()
        .with(performanceIri, RDF.TYPE, MO.C_PERFORMANCE)
        .with(performanceIri, BMMO.P_IMPORTED_FROM, BMMO.O_SOURCE_MUSICBRAINZ)
        .with(performanceIri, MO.P_MUSICBRAINZ_GUID, literalFor(recording.getId()))
        .with(performanceIri, MO.P_RECORDED_AS, signalIri)
        .with(artistIri, RDF.TYPE, MO.C_MUSIC_ARTIST)
        .with(artistIri, RDFS.LABEL, literalFor(artist.getName()))
        .with(artistIri, FOAF.NAME, literalFor(artist.getName()))
        .with(artistIri, BMMO.P_IMPORTED_FROM, BMMO.O_SOURCE_MUSICBRAINZ)
        .with(artistIri, MO.P_MUSICBRAINZ_GUID, literalFor(artist.getId()))
        .with(artistIri, MO.P_MUSICBRAINZ, musicBrainzIriFor("artist", artist.getId()))

        // TODO these could be inferred - performance shortcuts. Catalog queries rely upon these.
        .with(recordIri, FOAF.MAKER, artistIri)
        .with(trackIri, FOAF.MAKER, artistIri)
        .with(performanceIri, FOAF.MAKER, artistIri);
        // .with(signalIri, FOAF.MAKER, artistIri);

    // Only artist relations map to role predicates (conductor, vocal/soprano, ...).
    if ("artist".equals(targetType))
      {
        predicatesForArtists(type, attributes)
                .forEach(predicate -> model.with(performanceIri, predicate, artistIri));
      }

    return model;
    // relation.getBegin();
    // relation.getEnd();
    // relation.getEnded();
  }
/*******************************************************************************************************************
*
*
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Maps a relation type (with its attributes) to the RDF predicates describing the artist's role. For
 * "vocal" and "instrument" relations the attribute content refines the role (e.g. "vocal/soprano").
 *
 ******************************************************************************************************************/
@Nonnull
private static List<IRI> predicatesForArtists (@Nonnull final String type, @Nonnull final List<Attribute> attributes)
  {
    // No attributes: the type alone names the role.
    if (attributes.isEmpty())
      {
        return singletonList(predicateFor(type));
      }

    final List<IRI> predicates = new ArrayList<>();

    for (final Attribute attribute : attributes)
      {
        String role = type;

        if (type.equals("vocal") || type.equals("instrument"))
          {
            role += "/" + attribute.getContent();
          }

        predicates.add(predicateFor(role));
      }

    return predicates;
  }
/*******************************************************************************************************************
*
* Given a list of {@link ReleaseGroup}s, navigates into it and extract all CD {@link Medium}s that match the
* given CDDB track offsets.
*
* @param releaseGroups the {@code ReleaseGroup}s
* @param cddb the track offsets
* @param validation how the results must be validated
* @return a collection of filtered {@code Medium}s
*
******************************************************************************************************************/
@Nonnull
private Collection<ReleaseMediumDisk> findReleases (@Nonnull final List<ReleaseGroup> releaseGroups,
                                                    @Nonnull final Cddb cddb,
                                                    @Nonnull final Validation validation)
  {
    // Keep only release groups whose MusicBrainz search score passes the threshold, then recurse
    // into each group's release list and concatenate the matches.
    return releaseGroups.stream()
                        .parallel()
                        .filter(releaseGroup -> scoreOf(releaseGroup) >= releaseGroupScoreThreshold)
                        .peek(this::logArtists)
                        .flatMap(releaseGroup -> findReleases(releaseGroup.getReleaseList(), cddb, validation).stream())
                        .collect(toList());
  }
/*******************************************************************************************************************
*
* Given a {@link ReleaseList}, navigates into it and extract all CD {@link Medium}s that match the given CDDB track
* offsets.
*
* @param releaseList the {@code ReleaseList}
* @param cddb the track offsets to match
* @param validation how the results must be validated
* @return a collection of filtered {@code Medium}s
*
******************************************************************************************************************/
@Nonnull
private Collection<ReleaseMediumDisk> findReleases (@Nonnull final ReleaseList releaseList,
                                                    @Nonnull final Cddb cddb,
                                                    @Nonnull final Validation validation)
  {
    return releaseList.getRelease().stream()
            .parallel()
            // .peek(this::logArtists)
            .peek(release -> log.info(">>>>>>>> release: {} {}", release.getId(), release.getTitle()))
            // Remote fetch per release: the full resource is needed to enumerate its media.
            .flatMap(_f(release -> mbMetadataProvider.getResource(RELEASE, release.getId(), RELEASE_INCLUDES).get()
                                                     .getMediumList().getMedium()
                                                     .stream()
                                                     .map(medium -> new ReleaseMediumDisk(release, medium))))
            // Keep only CDs (or media with no declared format).
            .filter(MusicBrainzAudioMedatataImporter::matchesFormat)
            // Fan out: one candidate per disc of each medium.
            .flatMap(rmd -> rmd.getMedium().getDiscList().getDisc().stream().map(rmd::withDisc))
            .filter(rmd -> matchesTrackOffsets(rmd, cddb, validation))
            .peek(rmd -> log.info(">>>>>>>> FOUND {} - with score {}", rmd.getMediumAndDiscString(), 0 /* scoreOf(releaseGroup) FIXME */))
            // De-duplicate by release id (later entries win); TreeMap keeps the result deterministically ordered.
            .collect(toMap(rmd -> rmd.getRelease().getId(), rmd -> rmd, (u, v) -> v, TreeMap::new))
            .values();
  }
/*******************************************************************************************************************
*
*
*
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Computes the affinity between two titles: a case-insensitive fuzzy distance, reduced by fixed
 * penalties for known generic collection titles so that more specific titles win the pick.
 *
 ******************************************************************************************************************/
public static int similarity (@Nonnull final String a, @Nonnull final String b)
  {
    int score = StringUtils.getFuzzyDistance(a.toLowerCase(), b.toLowerCase(), Locale.UK);
    //
    // While this is a hack, it isn't so ugly as it might appear. The idea is to give a lower score to
    // collections and records with a generic title, hoping that a better one is picked.
    // FIXME: put into a map and then into an external resource with the delta score associated.
    // FIXME: with the filtering on collection size, this might be useless?
    //
    final boolean genericCollection = a.matches("^Great Violin Concertos.*")
                                   || a.matches("^CBS Great Performances.*");
    final boolean genericTitle = a.matches("^Piano Concertos$")
                              || a.matches("^Klavierkonzerte$");

    if (genericCollection)
      {
        score -= 50;
      }

    if (genericTitle)
      {
        score -= 30;
      }

    return score;
  }
/*******************************************************************************************************************
*
* Returns {@code true} if the given {@link ReleaseMediumDisk} is of a meaningful type (that is, a CD) or it's not set.
*
* @param rmd the {@code ReleaseMediumDisk}
* @return {@code true} if there is a match
*
******************************************************************************************************************/
private static boolean matchesFormat (@Nonnull final ReleaseMediumDisk rmd)
  {
    final String format = rmd.getMedium().getFormat();
    // A missing format is accepted; any declared format other than "CD" is rejected.
    final boolean accepted = (format == null) || "CD".equals(format);

    if (!accepted)
      {
        log.info(">>>>>>>> discarded {} because not a CD ({})", rmd.getMediumAndDiscString(), format);
      }

    return accepted;
  }
/*******************************************************************************************************************
*
* Returns {@code true} if the given {@link ReleaseMediumDisk} matches the track offsets in the given {@link Cddb}.
*
* @param rmd the {@code ReleaseMediumDisk}
* @param requestedCddb the track offsets to match
* @param validation how the results must be validated
* @return {@code true} if there is a match
*
******************************************************************************************************************/
private boolean matchesTrackOffsets (@Nonnull final ReleaseMediumDisk rmd,
                                     @Nonnull final Cddb requestedCddb,
                                     @Nonnull final Validation validation)
  {
    final Cddb cddb = rmd.getCddb();

    if (cddb == null)
      {
        // No track offsets in the MusicBrainz metadata: accept only when a match is not required.
        if (validation == Validation.TRACK_OFFSETS_MATCH_NOT_REQUIRED)
          {
            log.info(">>>>>>>> no track offsets, but not required");
            return true;
          }

        // Previously a null cddb fell through to matches() — and cddb.getTrackFrameOffsets() in the
        // debug logging below would have thrown a NullPointerException. Treat "no offsets" as a
        // non-match when a match is required.
        log.info(">>>>>>>> discarded {} because it has no track offsets", rmd.getMediumAndDiscString());
        return false;
      }

    final boolean matches = requestedCddb.matches(cddb, trackOffsetsMatchThreshold);

    if (!matches)
      {
        synchronized (log) // keep log lines together
          {
            log.info(">>>>>>>> discarded {} because track offsets don't match", rmd.getMediumAndDiscString());
            log.debug(">>>>>>>> iTunes offsets: {}", requestedCddb.getTrackFrameOffsets());
            log.debug(">>>>>>>> found offsets: {}", cddb.getTrackFrameOffsets());
          }
      }

    return matches;
  }
/*******************************************************************************************************************
*
* Searches for an alternate title of a record by querying the embedded title against the CDDB. The CDDB track
* offsets are checked to validate the result.
*
* @param metadata the {@code Metadata}
* @return the title, if found
*
******************************************************************************************************************/
@Nonnull
private Optional<String> cddbAlternateTitleOf (@Nonnull final Metadata metadata)
  throws IOException, InterruptedException
  {
    // Remote query against the CDDB provider.
    final RestResponse<CddbAlbum> optionalAlbum = cddbMetadataProvider.findCddbAlbum(metadata);

    if (!optionalAlbum.isPresent())
      {
        return Optional.empty();
      }

    final CddbAlbum album = optionalAlbum.get();
    final Cddb albumCddb = album.getCddb();
    // NOTE(review): the unconditional .get() assumes ITUNES_COMMENT is always present in the
    // metadata at this point — confirm against the callers.
    final Cddb requestedCddb = metadata.get(ITUNES_COMMENT).get().getCddb();
    final Optional<String> dTitle = album.getProperty("DTITLE");

    // The CDDB hit is only trusted when its track offsets match the requested ones within the threshold.
    if (!albumCddb.matches(requestedCddb, trackOffsetsMatchThreshold))
      {
        synchronized (log) // keep log lines together
          {
            log.info(">>>> discarded alternate title because of mismatching track offsets: {}", dTitle);
            log.debug(">>>>>>>> found track offsets: {}", albumCddb.getTrackFrameOffsets());
            log.debug(">>>>>>>> searched track offsets: {}", requestedCddb.getTrackFrameOffsets());
            log.debug(">>>>>>>> ppm {}", albumCddb.computeDifference(requestedCddb));
          }

        return Optional.empty();
      }

    return dTitle;
  }
/*******************************************************************************************************************
*
*
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Returns the attributes of the given relation as a mutable list, empty when the relation has no
 * attribute list at all.
 *
 ******************************************************************************************************************/
@Nonnull
private static List<Attribute> getAttributes (@Nonnull final Relation relation)
  {
    return (relation.getAttributeList() == null)
            ? new ArrayList<>()
            : new ArrayList<>(relation.getAttributeList().getAttribute());
  }
/*******************************************************************************************************************
*
*
*
******************************************************************************************************************/
@Nonnull
private static ModelBuilder createModelBuilder()
  {
    // Every statement produced by this importer is tagged with the MusicBrainz source.
    return new ModelBuilder(SOURCE_MUSICBRAINZ);
  }
/*******************************************************************************************************************
*
*
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Creates the IRI of an artist out of the SHA1 of its MusicBrainz IRI.
 *
 ******************************************************************************************************************/
@Nonnull
private static IRI artistIriOf (@Nonnull final String id)
  {
    final String musicBrainzIri = musicBrainzIriFor("artist", id).stringValue();
    return BMMO.artistIriFor(createSha1IdNew(musicBrainzIri));
  }
/*******************************************************************************************************************
*
*
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Creates the IRI of a track out of the SHA1 of its MusicBrainz IRI.
 *
 ******************************************************************************************************************/
@Nonnull
private static IRI trackIriOf (@Nonnull final String id)
  {
    final String musicBrainzIri = musicBrainzIriFor("track", id).stringValue();
    return BMMO.trackIriFor(createSha1IdNew(musicBrainzIri));
  }
/*******************************************************************************************************************
*
* FIXME: DUPLICATED FROM EmbbededAudioMetadataImporter
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Creates the IRI of a record: from the SHA1 of the CDDB TOC when available, otherwise from the SHA1
 * of the record title. FIXME: DUPLICATED FROM EmbbededAudioMetadataImporter
 *
 ******************************************************************************************************************/
@Nonnull
private static IRI recordIriOf (@Nonnull final Metadata metadata, @Nonnull final String recordTitle)
  {
    final String sha1 = metadata.get(CDDB)
                                .map(cddb -> createSha1IdNew(cddb.getToc()))
                                .orElseGet(() -> createSha1IdNew("RECORD:" + recordTitle));
    return BMMO.recordIriFor(sha1);
  }
/*******************************************************************************************************************
*
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Creates the IRI of a signal out of the SHA1 of the CDDB TOC plus the track number.
 *
 ******************************************************************************************************************/
@Nonnull
private IRI signalIriFor (@Nonnull final Cddb cddb, @Nonnegative final int trackNumber)
  {
    final String key = cddb.getToc() + "/" + trackNumber;
    return BMMO.signalIriFor(createSha1IdNew(key));
  }
/*******************************************************************************************************************
*
*
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Creates the IRI of a performance out of the SHA1 of its MusicBrainz IRI.
 *
 ******************************************************************************************************************/
@Nonnull
private static IRI performanceIriFor (@Nonnull final String id)
  {
    final String musicBrainzIri = musicBrainzIriFor("performance", id).stringValue();
    return BMMO.performanceIriFor(createSha1IdNew(musicBrainzIri));
  }
/*******************************************************************************************************************
*
*
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Builds the canonical MusicBrainz IRI for the given resource type and id.
 *
 ******************************************************************************************************************/
@Nonnull
private static IRI musicBrainzIriFor (@Nonnull final String resourceType, @Nonnull final String id)
  {
    final String url = String.format("http://musicbrainz.org/%s/%s", resourceType, id);
    return FACTORY.createIRI(url);
  }
/*******************************************************************************************************************
*
*
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Maps a role name (case-insensitive) to its RDF predicate; fails fast on unmapped roles.
 *
 ******************************************************************************************************************/
@Nonnull
private static IRI predicateFor (@Nonnull final String role)
  {
    final IRI predicate = PERFORMER_MAP.get(role.toLowerCase());
    return Objects.requireNonNull(predicate, "Cannot map role: " + role);
  }
/*******************************************************************************************************************
*
*
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Extracts the MusicBrainz search score of a release group from its extension attributes.
 *
 ******************************************************************************************************************/
private static int scoreOf (@Nonnull final ReleaseGroup releaseGroup)
  {
    final String score = releaseGroup.getOtherAttributes().get(QNAME_SCORE);
    return Integer.parseInt(score);
  }
/*******************************************************************************************************************
*
*
*
******************************************************************************************************************/
/*******************************************************************************************************************
 *
 * Logs (at debug level) the score, id, title and credited artists of a release group.
 *
 ******************************************************************************************************************/
private void logArtists (@Nonnull final ReleaseGroup releaseGroup)
  {
    final List<String> artistNames = releaseGroup.getArtistCredit()
                                                 .getNameCredit()
                                                 .stream()
                                                 .map(nameCredit -> nameCredit.getArtist().getName())
                                                 .collect(toList());
    log.debug(">>>> {} {} {} artist: {}",
              releaseGroup.getOtherAttributes().get(QNAME_SCORE),
              releaseGroup.getId(),
              releaseGroup.getTitle(),
              artistNames);
  }
/*******************************************************************************************************************
 *
 * Returns the given optional number, turned into an empty optional when
 * the value is exactly 1 (used to suppress "disc 1 of 1" style labels).
 *
 ******************************************************************************************************************/
@Nonnull
private static Optional<Integer> emptyIfOne (@Nonnull final Optional<Integer> number)
  {
    // Optional.filter() keeps the value only when the predicate holds,
    // which is exactly "empty when the value is 1".
    return number.filter(n -> n != 1);
  }
/*******************************************************************************************************************
 *
 * Renders an Attribute as "content creditedAs (value)" for logging /
 * diagnostic purposes.
 *
 ******************************************************************************************************************/
@Nonnull
private static String toString (@Nonnull final Attribute attribute)
{
return String.format("%s %s (%s)", attribute.getContent(), attribute.getCreditedAs(), attribute.getValue());
}
}
|
zhangannyi/practise-of-springcloud-and-docker
|
ribbon-service/src/main/java/fun/liwudi/ribbonservice/service/impl/HelloServiceImpl.java
|
package fun.liwudi.ribbonservice.service.impl;
import fun.liwudi.ribbonservice.service.HelloService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;
import javax.xml.ws.Action;
/**
* @author 李武第
*/
@Service
public class HelloServiceImpl implements HelloService{
// Load-balanced RestTemplate: the "EUREKA-CLIENT" host in the URL is a
// Eureka service id resolved by Ribbon at call time.
// NOTE(review): field injection; constructor injection is generally
// preferred — confirm against project conventions.
@Autowired
RestTemplate restTemplate;
@Override
public String hiService(String name) {
// Delegates to the /hi endpoint of the EUREKA-CLIENT service and
// returns its plain-text body.
return restTemplate.getForObject("http://EUREKA-CLIENT/hi?name="+name,String.class);
}
}
|
HoneyLyrics/Front
|
src/components/mood-list/AdditionFeatures.js
|
import React, { useState } from 'react';
import styled from 'styled-components';
import MelonLink from '../common/MelonLink';
import { BsHeart, BsHeartFill, BsMusicNoteList } from 'react-icons/bs';
import { AiOutlineCheck } from 'react-icons/ai';
// Container for the per-song action buttons (like / playlist / Melon link).
// Desktop: a vertical 80px column. <=768px: repositioned to overlay the song
// row as a horizontal strip. <=480px: collapsed — the playlist button
// (2nd child) and the anchor link are hidden.
const AdditionFeaturesBlock = styled.div`
width: 80px;
display: flex;
flex-direction: column;
justify-content: space-around;
align-items: center;
font-family: 'Noto Sans', sans-serif;
&:hover {
cursor: pointer;
}
@media (max-width: 768px) {
width: 175px;
position: relative;
bottom: 38px;
left: 263px;
flex-direction: row;
justify-content: space-between;
align-items: flex-start;
}
@media (max-width: 480px) {
width: 30px;
bottom: 35px;
left: 195px;
div:nth-child(2) {
display: none;
}
a {
display: none;
}
}
`;
// Single 25x25 icon button; small top padding tweaks align the first two
// icons, and the first child's padding is removed on phones (<=480px).
const AdditionBtn = styled.div`
&:first-child {
padding-top: 2px;
}
&:nth-child(2) {
padding-top: 1px;
}
svg {
width: 25px;
height: 25px;
}
@media (max-width: 480px) {
&:first-child {
padding-top: 0;
}
}
`;
const AdditionFeatures = ({ songId }) => {
const [isLike, setIsLike] = useState(false);
const [isPlaylist, setIsPlaylist] = useState(false);
const handleIsLike = () => {
setIsLike(isLike => !isLike);
};
const handleIsPlaylist = () => {
setIsPlaylist(isPlaylist => !isPlaylist);
};
return (
<AdditionFeaturesBlock>
<AdditionBtn onClick={handleIsLike}>
{isLike ? <BsHeartFill color="ff033e" /> : <BsHeart color="ff033e" />}
</AdditionBtn>
<AdditionBtn onClick={handleIsPlaylist}>
{isPlaylist ? <AiOutlineCheck /> : <BsMusicNoteList />}
</AdditionBtn>
<MelonLink songId={songId} />
</AdditionFeaturesBlock>
);
};
export default AdditionFeatures;
|
mslovy/barrelfish
|
kernel/include/arch/armv7-m/start_aps.h
|
/**
* \file
* \brief Definitions for the startup of application processors.
*
* This file contains the prototypes for the functions which start
* the application processors
*/
/*
* Copyright (c) 2007, 2008, 2010, ETH Zurich.
* All rights reserved.
*
* This file is distributed under the terms in the attached LICENSE file.
* If you do not find this file, copies can be found by writing to:
* ETH Zurich D-INFK, Haldeneggsteig 4, CH-8092 Zurich. Attn: Systems Group.
*/
#ifndef START_APS_H_
#define START_APS_H_
#if defined(__gem5__)
/* State values written into the AP wait word while a core boots. */
#define AP_STARTING_UP 1
#define AP_STARTED 2
//#define AP_LOCK_PHYS 0x20000
/* Physical addresses of the wait/global communication pages. */
#define AP_WAIT_PHYS 0x20000
#define AP_GLOBAL_PHYS 0x21000
#elif defined(__pandaboard__)
/* Distinct magic state values (deliberately not 1/2) for the pandaboard. */
#define AP_STARTING_UP 4422
#define AP_STARTED 6633
#define AP_WAIT_PHYS ((lpaddr_t)0x80020000)
#define AP_GLOBAL_PHYS ((lpaddr_t)0x80021000)
/* NOTE(review): presumably the OMAP44xx AUX_CORE_BOOT registers used to
 * release the second Cortex-A9 — confirm against the OMAP4 TRM. */
#define AUX_CORE_BOOT_0 ((lpaddr_t)0x48281800)
#define AUX_CORE_BOOT_1 ((lpaddr_t)0x48281804)
// address of the section needed to map AUX_CORE vars
#define AUX_CORE_BOOT_SECT (AUX_CORE_BOOT_0 & ~ARM_L1_SECTION_MASK)
// offset of AUX_CORE_BOOT_0 in the section
#define AUX_CORE_BOOT_0_OFFSET (AUX_CORE_BOOT_0 & ARM_L1_SECTION_MASK)
// offset of AUX_CORE_BOOT_1 in the section
#define AUX_CORE_BOOT_1_OFFSET (AUX_CORE_BOOT_1 & ARM_L1_SECTION_MASK)
#else
#error "Unknown ARM arch"
#endif
/* Start the application processor core_id at the given entry point. */
int start_aps_arm_start(uint8_t core_id, lvaddr_t entry);
#endif // START_APS_H_
|
PY-GZKY/Tplan
|
backend/app/app/user_menu/crud_user.py
|
<reponame>PY-GZKY/Tplan
from typing import Optional
from sqlalchemy.orm import Session
from app.user_menu.base import CRUDBase
from app.models import User
from app.schemas import UserCreate, UserUpdate
from app.security.security import verify_password
class CRUDUser(CRUDBase[User, UserCreate, UserUpdate]):
    """CRUD operations for :class:`User`, plus authentication helpers."""

    def get_by_username(self, db: Session, *, username: str) -> Optional[User]:
        """Return the user with ``username``, or ``None`` if absent."""
        # Query through self.model for consistency with the generic
        # CRUDBase pattern (behaviorally identical here: model is User).
        return db.query(self.model).filter(self.model.username == username).first()

    def authenticate(self, db: Session, *, username: str, password: str) -> Optional[User]:
        """Return the user iff the username exists and the password matches.

        Returns ``None`` for both "unknown user" and "bad password" so
        callers cannot distinguish the two failure modes.
        """
        user = self.get_by_username(db, username=username)
        if not user:
            return None
        if not verify_password(password, user.hashed_password):
            return None
        return user

    def is_active(self, user: User) -> bool:
        """True when the account is enabled."""
        return user.is_active

    def is_superuser(self, user: User) -> bool:
        """True when the account has superuser privileges."""
        return user.is_superuser
user = CRUDUser(User)
|
eea/volto-eea-design-system
|
src/ui/Media/Image.stories.js
|
<reponame>eea/volto-eea-design-system
import React from 'react';
import { Image } from 'semantic-ui-react';
import banner from '@eeacms/volto-eea-design-system/../theme/themes/eea/assets/images/banner.png';
// Storybook metadata for the semantic-ui-react Image component: title,
// click-action logging, and the shared controls (size / spaced / floated /
// src) documented for the auto-generated docs panel.
export default {
title: 'Components/Media Container/Image',
component: Image,
parameters: {
actions: {
handles: ['click'],
},
},
argTypes: {
size: {
control: { type: 'inline-radio' },
options: [
'mini',
'tiny',
'small',
'medium',
'large',
'big',
'huge',
'massive',
],
description: 'an image can vary in size',
table: {
type: { summary: 'string' },
defaultValue: { summary: ' "" ' },
},
},
spaced: {
control: { type: 'inline-radio' },
options: ['left', 'right'],
description:
'an image can specify that it needs an additional spacing to separate it from nearby content',
table: {
type: { summary: 'bool|string' },
defaultValue: { summary: ' "" ' },
},
},
floated: {
control: { type: 'inline-radio' },
options: ['left', 'right'],
description: 'an image can sit to the left or right of other content',
table: {
type: { summary: 'string' },
defaultValue: { summary: ' "" ' },
},
},
src: {
description: 'Path or Url of the image',
table: {
type: { summary: 'string' },
defaultValue: { summary: ' "" ' },
},
},
},
};
// Basic story: the banner image with default props.
export const Default = (args) => <Image {...args} alt="image" />;
Default.args = {
src: banner,
};
// "Link Image" story: renders the Image as an <a> wrapping the banner.
export const Link = (args) => <Image {...args} alt="link image" />;
Link.args = {
src: banner,
size: 'small',
as: 'a',
href: 'https://www.eea.europa.eu/logo.jpg',
hidden: false,
disabled: false,
avatar: false,
bordered: false,
fluid: false,
rounded: false,
circular: false,
centered: false,
spaced: false,
floated: 'left',
};
// Control documentation for the Link story's boolean/shape props.
Link.argTypes = {
  hidden: {
    description: 'an image can be hidden',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  disabled: {
    description: 'an image can show that it is disabled and cannot be selected',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  avatar: {
    // Bug fix: the description/type were copy-pasted from `src`; avatar is a
    // boolean flag (see Link.args above), not a path/URL string.
    description:
      'an image may be formatted to appear inline with text as an avatar',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  bordered: {
    description:
      'an image may include a border to emphasize the edges of white or transparent content',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  fluid: {
    description: 'an image can take up the width of its container',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  rounded: {
    description: 'an image may appear rounded',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  circular: {
    description: 'an image may appear circular',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  centered: {
    description: 'an image can appear centered in a content block',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  as: {
    description: 'an element type to render as (string or function)',
    table: {
      type: { summary: 'elementType' },
      defaultValue: { summary: 'img' },
    },
  },
  href: {
    description: 'renders the Image as an <a> tag with this href',
    table: {
      type: { summary: 'string' },
      defaultValue: { summary: ' "" ' },
    },
  },
};
Link.storyName = 'Link Image';
// Free-form playground story: all boolean formatting flags exposed.
export const Playground = (args) => <Image {...args} alt="playground image" />;
Playground.args = {
src: banner,
size: 'small',
hidden: false,
disabled: false,
avatar: false,
bordered: false,
fluid: false,
rounded: false,
circular: false,
centered: false,
spaced: false,
floated: 'left',
};
// Control documentation for the Playground story's boolean props.
Playground.argTypes = {
  hidden: {
    description: 'an image can be hidden',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  disabled: {
    description: 'an image can show that it is disabled and cannot be selected',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  avatar: {
    // Bug fix: the description/type were copy-pasted from `src`; avatar is a
    // boolean flag (see Playground.args above), not a path/URL string.
    description:
      'an image may be formatted to appear inline with text as an avatar',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  bordered: {
    description:
      'an image may include a border to emphasize the edges of white or transparent content',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  fluid: {
    description: 'an image can take up the width of its container',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  rounded: {
    description: 'an image may appear rounded',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  circular: {
    description: 'an image may appear circular',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
  centered: {
    description: 'an image can appear centered in a content block',
    table: {
      type: { summary: 'boolean' },
      defaultValue: { summary: false },
    },
  },
};
|
chanikag/wso2-axiom-1
|
modules/axiom-dom/src/main/java/org/apache/axiom/soap/impl/dom/factory/DOMSOAPFactory.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axiom.soap.impl.dom.factory;
import org.apache.axiom.om.OMDataSource;
import org.apache.axiom.om.OMNamespace;
import org.apache.axiom.om.OMXMLParserWrapper;
import org.apache.axiom.om.impl.dom.DocumentImpl;
import org.apache.axiom.om.impl.dom.factory.OMDOMFactory;
import org.apache.axiom.om.impl.dom.factory.OMDOMMetaFactory;
import org.apache.axiom.soap.SOAPBody;
import org.apache.axiom.soap.SOAPEnvelope;
import org.apache.axiom.soap.SOAPFactory;
import org.apache.axiom.soap.SOAPFault;
import org.apache.axiom.soap.SOAPFaultCode;
import org.apache.axiom.soap.SOAPFaultDetail;
import org.apache.axiom.soap.SOAPFaultNode;
import org.apache.axiom.soap.SOAPFaultReason;
import org.apache.axiom.soap.SOAPFaultRole;
import org.apache.axiom.soap.SOAPFaultSubCode;
import org.apache.axiom.soap.SOAPFaultText;
import org.apache.axiom.soap.SOAPFaultValue;
import org.apache.axiom.soap.SOAPHeader;
import org.apache.axiom.soap.SOAPHeaderBlock;
import org.apache.axiom.soap.SOAPMessage;
import org.apache.axiom.soap.SOAPProcessingException;
import org.apache.axiom.soap.SOAPVersion;
import org.apache.axiom.soap.impl.dom.SOAPEnvelopeImpl;
import org.apache.axiom.soap.impl.dom.SOAPMessageImpl;
import org.apache.axiom.soap.impl.dom.soap11.SOAP11FaultDetailImpl;
import org.apache.axiom.soap.impl.dom.soap12.SOAP12FaultImpl;
/**
 * DOM-based (DOOM) SOAP factory. Version-agnostic base: most creation
 * methods throw UnsupportedOperationException and are expected to be
 * implemented by SOAP-version-specific subclasses.
 */
public class DOMSOAPFactory extends OMDOMFactory implements SOAPFactory {
public DOMSOAPFactory(OMDOMMetaFactory metaFactory) {
super(metaFactory);
}
public DOMSOAPFactory() {
}
public DOMSOAPFactory(DocumentImpl doc) {
super(doc);
}
public String getSoapVersionURI() {
throw new UnsupportedOperationException();
}
public SOAPVersion getSOAPVersion() {
throw new UnsupportedOperationException();
}
// Creates a SOAP message and remembers it as this factory's current document.
public SOAPMessage createSOAPMessage(OMXMLParserWrapper builder) {
SOAPMessageImpl messageImpl = new SOAPMessageImpl(builder, this);
this.document = messageImpl;
return messageImpl;
}
public SOAPMessage createSOAPMessage(SOAPEnvelope envelope, OMXMLParserWrapper parserWrapper) {
SOAPMessageImpl messageImpl = new SOAPMessageImpl(envelope, parserWrapper, this);
this.document = messageImpl;
return messageImpl;
}
// Builds an envelope backed by a freshly created DOM document.
public SOAPEnvelope createSOAPEnvelope(OMXMLParserWrapper builder) {
return new SOAPEnvelopeImpl((DocumentImpl) this.createOMDocument(), builder, this);
}
public SOAPEnvelope createSOAPEnvelope() throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPEnvelope createSOAPEnvelope(OMNamespace ns) {
throw new UnsupportedOperationException();
}
public SOAPHeader createSOAPHeader(SOAPEnvelope envelope) throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPHeader createSOAPHeader(SOAPEnvelope envelope, OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPHeaderBlock createSOAPHeaderBlock(String localName, OMNamespace ns,
SOAPHeader parent) throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPHeaderBlock createSOAPHeaderBlock(String localName, OMNamespace ns,
SOAPHeader parent, OMXMLParserWrapper builder)
throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPFault createSOAPFault(SOAPBody parent, Exception e) throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
// NOTE(review): this version-agnostic base returns a SOAP 1.2 fault
// implementation — confirm SOAP 1.1 subclasses override this.
public SOAPFault createSOAPFault(SOAPBody parent) throws SOAPProcessingException {
return new SOAP12FaultImpl(parent, this);
}
public SOAPFault createSOAPFault(SOAPBody parent, OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPBody createSOAPBody(SOAPEnvelope envelope) throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPBody createSOAPBody(SOAPEnvelope envelope, OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPFaultCode createSOAPFaultCode(SOAPFault parent) throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPFaultCode createSOAPFaultCode(SOAPFault parent, OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPFaultValue createSOAPFaultValue(SOAPFaultCode parent)
throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPFaultValue createSOAPFaultValue(SOAPFaultCode parent, OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPFaultValue createSOAPFaultValue(SOAPFaultSubCode parent)
throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPFaultValue createSOAPFaultValue(SOAPFaultSubCode parent,
OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPFaultSubCode createSOAPFaultSubCode(SOAPFaultCode parent)
throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPFaultSubCode createSOAPFaultSubCode(SOAPFaultCode parent,
OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPFaultSubCode createSOAPFaultSubCode(SOAPFaultSubCode parent)
throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPFaultSubCode createSOAPFaultSubCode(SOAPFaultSubCode parent,
OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPFaultReason createSOAPFaultReason(SOAPFault parent) throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPFaultReason createSOAPFaultReason(SOAPFault parent, OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPFaultText createSOAPFaultText(SOAPFaultReason parent)
throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPFaultText createSOAPFaultText(SOAPFaultReason parent, OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPFaultNode createSOAPFaultNode(SOAPFault parent) throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPFaultNode createSOAPFaultNode(SOAPFault parent, OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPFaultRole createSOAPFaultRole(SOAPFault parent) throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPFaultRole createSOAPFaultRole(SOAPFault parent, OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPFaultDetail createSOAPFaultDetail(SOAPFault parent) throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
public SOAPFaultDetail createSOAPFaultDetail(SOAPFault parent, OMXMLParserWrapper builder) {
throw new UnsupportedOperationException();
}
public SOAPEnvelope getDefaultEnvelope() throws SOAPProcessingException {
throw new UnsupportedOperationException();
}
// Builds a skeleton fault envelope (code/value, reason/text, node, role,
// detail). Relies on getDefaultEnvelope(), which throws in this base
// class, so this only works in subclasses that override it.
public SOAPEnvelope getDefaultFaultEnvelope() throws SOAPProcessingException {
SOAPEnvelope defaultEnvelope = getDefaultEnvelope();
SOAPFault fault = createSOAPFault(defaultEnvelope.getBody());
SOAPFaultCode faultCode = createSOAPFaultCode(fault);
createSOAPFaultValue(faultCode);
SOAPFaultReason reason = createSOAPFaultReason(fault);
createSOAPFaultText(reason);
createSOAPFaultNode(fault);
createSOAPFaultRole(fault);
createSOAPFaultDetail(fault);
return defaultEnvelope;
}
public SOAPMessage createSOAPMessage() {
return new SOAPMessageImpl(this);
}
public SOAPHeader createSOAPHeader() throws SOAPProcessingException {
throw new UnsupportedOperationException("TODO");
}
public SOAPHeaderBlock createSOAPHeaderBlock(String localName, OMNamespace ns)
throws SOAPProcessingException {
throw new UnsupportedOperationException("TODO");
}
// NOTE(review): returns null instead of throwing like its sibling
// factory methods — callers get a silent null; looks like an oversight,
// confirm before relying on it.
public SOAPFault createSOAPFault() throws SOAPProcessingException {
return null;
}
public SOAPBody createSOAPBody() throws SOAPProcessingException {
throw new UnsupportedOperationException("TODO");
}
public SOAPFaultCode createSOAPFaultCode() throws SOAPProcessingException {
throw new UnsupportedOperationException("TODO");
}
public SOAPFaultValue createSOAPFaultValue() throws SOAPProcessingException {
throw new UnsupportedOperationException("TODO");
}
public SOAPFaultSubCode createSOAPFaultSubCode() throws SOAPProcessingException {
throw new UnsupportedOperationException("TODO");
}
public SOAPFaultReason createSOAPFaultReason() throws SOAPProcessingException {
throw new UnsupportedOperationException("TODO");
}
public SOAPFaultText createSOAPFaultText() throws SOAPProcessingException {
throw new UnsupportedOperationException("TODO");
}
public SOAPFaultNode createSOAPFaultNode() throws SOAPProcessingException {
throw new UnsupportedOperationException("TODO");
}
public SOAPFaultRole createSOAPFaultRole() throws SOAPProcessingException {
throw new UnsupportedOperationException("TODO");
}
// NOTE(review): returns a SOAP 1.1 detail implementation while
// createSOAPFault(SOAPBody) above builds SOAP 1.2 — inconsistent; confirm.
public SOAPFaultDetail createSOAPFaultDetail() throws SOAPProcessingException {
return new SOAP11FaultDetailImpl(this);
}
public OMNamespace getNamespace() {
throw new UnsupportedOperationException();
}
public SOAPHeaderBlock createSOAPHeaderBlock(String localName, OMNamespace ns, OMDataSource ds) throws SOAPProcessingException {
throw new UnsupportedOperationException("TODO");
}
}
|
1shenxi/webpack
|
test/configCases/dll-plugin/1-issue-10475/index.js
|
<filename>test/configCases/dll-plugin/1-issue-10475/index.js
// Regression test for issue 10475: requiring the DLL entry must resolve
// every module bundled into it.
it("should have all modules", function () {
	require("dll/index.js");
});
|
ALACRITIC/grapesjs
|
src/domain_abstract/ui/InputColor.js
|
<reponame>ALACRITIC/grapesjs
var Backbone = require('backbone');
var Input = require('./Input');
var Spectrum = require('spectrum-colorpicker');
// Color input view: a text field plus a Spectrum color-picker swatch.
module.exports = Input.extend({
template: _.template(`
<div class='<%= ppfx %>input-holder'></div>
<div class="<%= ppfx %>field-colorp">
<div class="<%= ppfx %>field-colorp-c">
<div class="<%= ppfx %>checker-bg"></div>
</div>
</div>`),
initialize(opts) {
Input.prototype.initialize.apply(this, arguments);
var ppfx = this.ppfx;
// CSS class names derived from the project prefix.
this.colorCls = ppfx + 'field-color-picker';
this.inputClass = ppfx + 'field ' + ppfx + 'field-color';
this.colorHolderClass = ppfx + 'field-colorp-c';
this.listenTo(this.model, 'change:value', this.handleModelChange);
},
/**
* Updates the view (text input and color swatch) when the model changes
* */
handleModelChange(...args) {
Input.prototype.handleModelChange.apply(this, args);
var value = this.model.get('value');
var colorEl = this.getColorEl();
// If no color selected I will set white for the picker
value = value === 'none' ? '#fff' : value;
colorEl.spectrum('set', value);
colorEl.get(0).style.backgroundColor = value;
},
/**
* Get the color input element, lazily creating and wiring the Spectrum
* picker on first access
* @return {HTMLElement}
*/
getColorEl() {
if(!this.colorEl) {
var model = this.model;
var colorEl = $('<div>', {class: this.colorCls});
var cpStyle = colorEl.get(0).style;
var elToAppend = this.target && this.target.config ? this.target.config.el : '';
colorEl.spectrum({
appendTo: elToAppend || 'body',
maxSelectionSize: 8,
showPalette: true,
showAlpha: true,
chooseText: 'Ok',
cancelText: '⨯',
palette: [],
// Live preview while dragging: only the swatch is updated.
move(color) {
var c = color.getAlpha() == 1 ? color.toHexString() : color.toRgbString();
cpStyle.backgroundColor = c;
},
// Commit: update the swatch and write the value back to the model.
change(color) {
var c = color.getAlpha() == 1 ? color.toHexString() : color.toRgbString();
c = c.replace(/ /g,'');
cpStyle.backgroundColor = c;
model.set('value', c);
}
});
this.colorEl = colorEl;
}
return this.colorEl;
},
render(...args) {
Input.prototype.render.apply(this, args);
// Mount the picker swatch into the template's color holder.
this.$el.find('.' + this.colorHolderClass).html(this.getColorEl());
return this;
}
});
|
Knutakir/KTT
|
docs/search/enumvalues_4.js
|
<gh_stars>0
// Doxygen-generated search index for enum values: each entry maps a search
// key to [label, url-fragment + scope info]. Generated file — do not edit.
var searchData=
[
['float_596',['Float',['../namespacektt.html#a79871821a23eee2b543fec77b52c54d7a22ae0e2b89e5e3d477f988cc36d3272b',1,'ktt']]],
['fullsearch_597',['FullSearch',['../namespacektt.html#a5f5b49f1e11331a499aa44fb1fa6788ba4774784e057c4bbae83302ed0de38df6',1,'ktt']]]
];
|
Advencher/web_GIS
|
public/js/pigmentsPage/gridFTWatch.js
|
<filename>public/js/pigmentsPage/gridFTWatch.js
//$(document).ready(function () {
// Pigments data grid. Optionally pre-filtered by a single station sample:
// pigment_sid is assumed to be set globally by the hosting page — TODO confirm.
var gridFT,
dataSource = '/pigmentsview';
if (typeof pigment_sid !== 'undefined' && pigment_sid !== -1) {
console.log('get sample finded');
dataSource = '/pigmentsview?sid='+pigment_sid;
console.log(dataSource);
}
// Grid configuration (gijgo-style API): bootstrap4 theme, fixed row heights,
// paged at 10 rows, with a per-row detail template showing coordinates.
gridFT = $('#grid').grid({
dataSource: dataSource,
uiLibrary: 'bootstrap4',
headerRowHeight: 'fixed',
bodyRowHeight: 'fixed',
fontSize: '12px',
primaryKey: 'ID',
columns: [
{ field: 'ID', width: 44, sortable: true },
{ field: 'Station', title:'Станция', sortable: true, width: 120},
{ field: 'Date', title:'Дата', type: 'date', sortable: true, format: 'yyyy/dd/mm', width: 90 },
{ field: 'SerialNumber', title:'С. №', width: 55},
{ field: 'ChlA', title:'Хлор. A', tooltip: 'Хлорофил A', width: 55},
{ field: 'ChlB', title:'Хлор. B', tooltip: 'Хлорофил B', width: 55},
{ field: 'ChlC', title:'Хлор. C', tooltip: 'Хлорофил C', width: 55},
{ field: 'TropCh', title:'ТХ', type: 'dropdown', editField: 'TropId', width: 40},
{ field: 'VolumeOfPW', title:'ОПВ', tooltip: 'Объем профильтрованной воды', width: 55},
{ field: 'A665k', title:'A(665k)', tooltip: 'A(665k)', width: 55},
{ field: 'Pigmentindex', title:'ПИ', tooltip: 'Пигментный индекс', width: 55},
{ field: 'Pheopigments', title:'ФП', tooltip: 'Феопигменты', width: 55},
{ field: 'Comment', title:'Коммент.', tooltip: 'Комментарий', width: 120}
//{ field: 'StationId', hidden: true}
],
detailTemplate: '<div><b>Широта:</b> {Latitude} <b>Долгота:</b> {Longitude}</div>',
icons: {
expandRow: '<i class="fa fa-plus" aria-hidden="true"></i>',
collapseRow: '<i class="fa fa-minus" aria-hidden="true"></i>'
},
//autoBind: false,
pager: { limit: 10, sizes: [5, 10, 15, 20] }
});
|
jsoagger/jsoagger-fx
|
jsoagger-jfxcore-engine/src/main/java/io/github/jsoagger/jfxcore/engine/components/input/InputText.java
|
<filename>jsoagger-jfxcore-engine/src/main/java/io/github/jsoagger/jfxcore/engine/components/input/InputText.java
/*-
* ========================LICENSE_START=================================
* JSoagger
* %%
* Copyright (C) 2019 JSOAGGER
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =========================LICENSE_END==================================
*/
package io.github.jsoagger.jfxcore.engine.components.input;
import java.util.Optional;
import com.jfoenix.controls.JFXPasswordField;
import com.jfoenix.controls.JFXTextField;
import io.github.jsoagger.core.utils.StringUtils;
import io.github.jsoagger.jfxcore.api.IJSoaggerController;
import io.github.jsoagger.jfxcore.viewdef.json.xml.XMLConstants;
import io.github.jsoagger.jfxcore.viewdef.json.xml.model.VLViewComponentXML;
import javafx.beans.binding.Bindings;
import javafx.scene.Node;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
/**
* @author <NAME>
* @mailto <EMAIL>
* @date 2019
*/
/**
 * Text input component: wraps a JFXTextField (or JFXPasswordField when the
 * configuration sets "passwordField") and binds it bidirectionally to the
 * owner's internal value property.
 */
public class InputText extends AbstractInputComponent {
private TextField textField = null;
/**
* Constructor
*/
public InputText() {
super();
}
/**
* Builds the underlying text/password field from the XML configuration and
* binds it to the owner's value.
* @{inheritedDoc}
*/
@Override
public void buildFrom(IJSoaggerController controller, VLViewComponentXML configuration) {
final String ispasswordField = configuration.getPropertyValue("passwordField");
newInstance(StringUtils.isNotBlank(ispasswordField) && Boolean.valueOf(ispasswordField));
super.buildFrom(controller, configuration);
textField.setId(id);
configure();
textField.setText(owner.getInitialInternalValue());
Bindings.bindBidirectional(owner.currentInternalValueProperty(), textField.textProperty(),
owner.getConverter());
}
/**
* NOTE(review): the binding above is bidirectional, but unbind() only
* removes unidirectional bindings — confirm this actually detaches it
* (Bindings.unbindBidirectional would be the symmetric call).
* {@inheritDoc}
*/
@Override
public void destroy() {
super.destroy();
textField.textProperty().unbind();
}
/**
* @{inheritedDoc}
*/
@Override
public void setText(String value) {
textField.setText(value);
}
// Instantiates the JFoenix field (password or plain) and strips the
// default style classes so the project's own styling applies.
private void newInstance(boolean ispasswordField) {
if (ispasswordField) {
textField = new JFXPasswordField();
textField.getStyleClass().remove("custom-text-field");
textField.getStyleClass().remove("jfx-text-field");
// textField.getStyleClass().remove("text-field");
textField.getStyleClass().remove("text-input");
} else {
textField = new JFXTextField();
textField.getStyleClass().remove("custom-text-field");
// textField.getStyleClass().remove("text-field");
textField.getStyleClass().remove("text-input");
}
}
/**
* Binds the given label's text to mirror the field's content.
* @{inheritedDoc}
*/
@Override
public void addDisplayBinding(Label label) {
super.addDisplayBinding(label);
label.textProperty().bind(textField.textProperty());
}
// Applies configuration: prompt text, focus, generated (disabled) state,
// read-only display config, and the Format section.
public void configure() {
final Optional<String> prompt = configuration.propertyValueOf(XMLConstants.PROMPT);
final Optional<Boolean> isAttributeGenerated =
configuration.booleanPropertyValueOf(XMLConstants.GENERATED);
final Optional<Boolean> isFocused = configuration.booleanPropertyValueOf(XMLConstants.FOCUSED);
// focus
isFocused.filter(e -> e == Boolean.TRUE).flatMap(e -> {
textField.requestFocus();
return Optional.empty();
});
// generated value
isAttributeGenerated.filter(e -> e == Boolean.TRUE).flatMap(e -> {
textField.setDisable(true);
return Optional.empty();
});
// prompt
prompt.ifPresent(e -> {
String p = prompt.get();
if (StringUtils.isNotBlank(p)) {
final String val = controller.getLocalised(prompt.get());
textField.setPromptText(val);
}
});
// process DisplayConfig
final Optional<VLViewComponentXML> displayConfig =
configuration.getComponentById(DISPLAY_CONFIG);
displayConfig.ifPresent(e -> {
e.booleanPropertyValueOf(XMLConstants.READ_ONLY).ifPresent(f -> {
textField.setDisable(f.booleanValue());
});
});
processFormat();
}
/**
* Process format section: installs a text listener enforcing uppercase,
* numeric-only, capitalization and max-length constraints.
*/
private void processFormat() {
final VLViewComponentXML format = configuration.getComponentById("Format").orElse(null);
if (format != null) {
final String upperCase = format.getPropertyValue(XMLConstants.UPPERCASE);
final String number = format.getPropertyValue(XMLConstants.NUMBER);
final String capitalize = format.getPropertyValue(XMLConstants.CAPITALIZE);
final String capitalizeAll = format.getPropertyValue(XMLConstants.CAPITALIZE_ALL);
final String maxLength = format.getPropertyValue(XMLConstants.MAX_LENGTH);
textField.textProperty().addListener((ov, oldValue, newValue) -> {
if (StringUtils.isNotBlank(upperCase)) {
owner.currentInternalValueProperty().set(newValue.toUpperCase());
}
if (StringUtils.isNotBlank(number)) {
if (newValue.matches("\\d*")) {
// NOTE(review): parse result is discarded; a long digit string
// will throw NumberFormatException inside this listener — confirm
// whether this parse is intentional validation.
Integer.parseInt(newValue);
} else {
owner.currentInternalValueProperty().set(oldValue);
}
}
if (StringUtils.isNotBlank(capitalize)) {
if (StringUtils.isNotBlank(owner.getCurrentInternalValue())) {
owner.currentInternalValueProperty()
.set(StringUtils.capitalize(owner.getCurrentInternalValue()));
}
}
// NOTE(review): identical to the 'capitalize' branch above —
// CAPITALIZE_ALL presumably should capitalize every word; looks like
// a copy-paste, confirm intended behavior.
if (StringUtils.isNotBlank(capitalizeAll)) {
if (StringUtils.isNotBlank(owner.getCurrentInternalValue())) {
owner.currentInternalValueProperty()
.set(StringUtils.capitalize(owner.getCurrentInternalValue()));
}
}
if (StringUtils.isNotBlank(maxLength)) {
try {
final int len = Integer.parseInt(maxLength);
if (StringUtils.isNotBlank(textField.getText())) {
if (textField.lengthProperty().get() > len) {
textField.setText(oldValue);
}
}
} catch (final Exception e) {
// Swallows a malformed maxLength value; the constraint is simply
// not applied in that case.
}
}
});
}
}
/**
* @{inheritedDoc}
*/
@Override
public Node getDisplay() {
return textField;
}
/**
* @{inheritedDoc}
*/
@Override
public Node getComponent() {
return textField;
}
}
|
aleroddepaz/java-samples
|
jetty-seed/src/test/java/org/arp/servlet/LoginServletTest.java
|
<gh_stars>0
package org.arp.servlet;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.IOException;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import org.junit.Test;
public class LoginServletTest extends BaseServletTest {
// System under test; request/response are presumably mocks provided by
// BaseServletTest — confirm in the base class.
private LoginServlet servlet = new LoginServlet();
@Test
public void testDoGet() throws ServletException, IOException {
// GET must forward to the login view through the request dispatcher.
RequestDispatcher dispatcher = spy(RequestDispatcher.class);
when(request.getRequestDispatcher(LoginServlet.VIEW_NAME)).thenReturn(dispatcher);
servlet.doGet(request, response);
verify(dispatcher).forward(eq(request), eq(response));
}
}
|
zxcwhale/android_gpsbd
|
hardware/libgps/asn-supl/SupportedWLANInfo.h
|
<reponame>zxcwhale/android_gpsbd
/*
* Generated by asn1c-0.9.28 (http://lionet.info/asn1c)
* From ASN.1 module "Ver2-ULP-Components"
* found in "../ver2-ulp-components.asn"
* `asn1c -fcompound-names -gen-PER`
*/
#ifndef _SupportedWLANInfo_H_
#define _SupportedWLANInfo_H_
#include <asn_application.h>
/* Including external dependencies */
#include <BOOLEAN.h>
#include <constr_SEQUENCE.h>
#ifdef __cplusplus
extern "C" {
#endif
/* SupportedWLANInfo */
/*
 * Generated by asn1c from the Ver2-ULP-Components ASN.1 module — do not
 * hand-edit. Each BOOLEAN_t is a per-field capability flag.
 * NOTE(review): the exact semantics of each flag (ap* = access point,
 * set* = SUPL-enabled terminal, per the naming) come from the ASN.1
 * module, not from this file — confirm against ver2-ulp-components.asn.
 */
typedef struct SupportedWLANInfo {
	BOOLEAN_t apTP;
	BOOLEAN_t apAG;
	BOOLEAN_t apSN;
	BOOLEAN_t apDevType;
	BOOLEAN_t apRSSI;
	BOOLEAN_t apChanFreq;
	BOOLEAN_t apRTD;
	BOOLEAN_t setTP;
	BOOLEAN_t setAG;
	BOOLEAN_t setSN;
	BOOLEAN_t setRSSI;
	BOOLEAN_t apRepLoc;
	/*
	 * This type is extensible,
	 * possible extensions are below.
	 */
	/* Context for parsing across buffer boundaries */
	asn_struct_ctx_t _asn_ctx;
} SupportedWLANInfo_t;
/* Implementation */
extern asn_TYPE_descriptor_t asn_DEF_SupportedWLANInfo;
#ifdef __cplusplus
}
#endif
#endif /* _SupportedWLANInfo_H_ */
#include <asn_internal.h>
|
tiger0132/code-backup
|
Codeforces/1000~1999/1381/C.cpp
|
#include <algorithm>
#include <cstdio>
#include <cstring>
#include <functional>
#include <queue>
#include <utility>
#include <vector>
// (color, position) pairs used when redistributing entries of Bob's code.
typedef std::pair<int, int> pii;
const int N = 1e5 + 51;
// v[c] holds the positions of the guess that currently contain color c.
std::vector<int> v[N];
// Orders colors by bucket size for the priority queue below.
// NOTE: this file deliberately exploits the commutativity of the built-in
// subscript operator — x[v] is the same expression as v[x] (*(v + x)).
struct cmp {
    bool operator()(int x, int y) const { return x[v].size() < y[v].size(); }
};
// Max-heap of colors: the color with the most remaining positions on top.
std::priority_queue<int, std::vector<int>, cmp> pq((cmp()));
int n, x, y, a[N], b[N];
// Codeforces 1381C-style solver. The test-case count is consumed by
// scanf("%*d"); each case reads n, x (exact matches required) and y
// (total multiset matches required), plus Alice's guess a[1..n], and
// prints a code b[1..n] or "NO".
// NOTE(review): comments below describe the visible mechanics; the
// feasibility bound is taken on trust from the accepted solution.
int main() {
    for (scanf("%*d"); ~scanf("%d%d%d", &n, &x, &y);) {
        // After this, y counts matches that must land in a *different* position.
        y -= x;
        // A color in [1, n+1] absent from a[] — guaranteed to exist by
        // pigeonhole (n values, n+1 candidates), so `bad` is always assigned.
        int bad;
        for (int i = 1; i <= n; i++) {
            // a[i][v] is v[a[i]]: record position i under its color.
            scanf("%d", a + i), a[i][v].push_back(i);
        }
        for (int i = 1; i <= n + 1; i++) {
            pq.push(i);
            if (i[v].empty()) bad = i;
        }
        // Satisfy the x exact matches: repeatedly take the currently largest
        // color bucket and pin one of its positions to its own color.
        for (int i = 1, id; i <= x; i++) {
            id = pq.top(), pq.pop();
            // b[v[id].back()] = id (subscripts commuted), then drop that slot.
            id[v].back()[b] = id;
            id[v].pop_back();
            pq.push(id);
        }
        // Flatten the remaining (color, position) pairs, largest bucket first.
        std::vector<pii> idx;
        int cnt1 = pq.top()[v].size();
        for (int id; !pq.empty();) {
            id = pq.top(), pq.pop();
            for (int j : id[v]) idx.push_back({id, j});
        }
        int siz = idx.size();
        int rem = n - x - y;
        // Feasibility check against the largest remaining bucket.
        if (y > 2 * (n - x) - 2 * cnt1) {
            puts("NO");
            goto end;
        }
        // Cyclically shift colors by cnt1 across the flattened list so that,
        // within a bucket, no position keeps its own color.
        for (int j = 0; j < siz; j++) idx[j].second[b] = idx[(j + cnt1) % siz].first;
        // Any accidental self-match is replaced with the unused color.
        for (int j = 0; j < siz; j++)
            if (idx[j].second[b] == idx[j].second[a]) idx[j].second[b] = bad, rem--;
        // Spend the remaining "no match at all" quota on arbitrary slots.
        for (int j = 0; rem; j++)
            if (idx[j].second[b] != bad) idx[j].second[b] = bad, rem--;
        puts("YES");
        // Print and simultaneously reset b[] for the next test case.
        for (int i = 1; i <= n; i++) printf("%d%c", b[i], " \n"[i == n]), b[i] = 0;
    end:
        // Reset per-test color buckets.
        for (int i = 1; i <= n + 1; i++) i[v].clear();
    }
}
|
guilhermejccavalcanti/Achilles
|
achilles-cql/src/test/java/info/archinnov/achilles/entity/manager/PersistenceManagerTest.java
|
/**
*
* Copyright (C) 2012-2013 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.archinnov.achilles.entity.manager;
import static info.archinnov.achilles.type.ConsistencyLevel.EACH_QUORUM;
import static info.archinnov.achilles.type.ConsistencyLevel.LOCAL_QUORUM;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import info.archinnov.achilles.context.ConfigurationContext;
import info.archinnov.achilles.context.DaoContext;
import info.archinnov.achilles.context.PersistenceContext;
import info.archinnov.achilles.context.PersistenceContextFactory;
import info.archinnov.achilles.entity.metadata.EntityMeta;
import info.archinnov.achilles.entity.metadata.PropertyMeta;
import info.archinnov.achilles.entity.operations.EntityInitializer;
import info.archinnov.achilles.entity.operations.EntityProxifier;
import info.archinnov.achilles.entity.operations.EntityValidator;
import info.archinnov.achilles.entity.operations.SliceQueryExecutor;
import info.archinnov.achilles.query.cql.NativeQueryBuilder;
import info.archinnov.achilles.query.slice.SliceQueryBuilder;
import info.archinnov.achilles.query.typed.TypedQueryBuilder;
import info.archinnov.achilles.query.typed.TypedQueryValidator;
import info.archinnov.achilles.test.builders.CompleteBeanTestBuilder;
import info.archinnov.achilles.test.mapping.entity.CompleteBean;
import info.archinnov.achilles.type.ConsistencyLevel;
import info.archinnov.achilles.type.IndexCondition;
import info.archinnov.achilles.type.Options;
import info.archinnov.achilles.type.OptionsBuilder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.math.RandomUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import org.powermock.reflect.Whitebox;
import com.datastax.driver.core.Session;
import com.google.common.collect.Sets;
@RunWith(MockitoJUnitRunner.class)
public class PersistenceManagerTest {

    @Rule
    public ExpectedException exception = ExpectedException.none();

    @Mock
    private EntityInitializer initializer;

    @Mock
    private EntityProxifier proxifier;

    @Mock
    private EntityValidator entityValidator;

    @Mock
    private TypedQueryValidator typedQueryValidator;

    @Mock
    private SliceQueryExecutor sliceQueryExecutor;

    @Mock
    private PersistenceManagerFactory pmf;

    @Mock
    private PersistenceContextFactory contextFactory;

    @Mock
    private DaoContext daoContext;

    @Mock
    private ConfigurationContext configContext;

    @Mock
    private PersistenceContext context;

    @Mock
    private Map<Class<?>, EntityMeta> entityMetaMap;

    @Mock
    private EntityMeta meta;

    @Mock
    private PropertyMeta idMeta;

    @Captor
    private ArgumentCaptor<Options> optionsCaptor;

    private PersistenceManager manager;

    private Long primaryKey = RandomUtils.nextLong();

    private CompleteBean entity = CompleteBeanTestBuilder.builder().id(primaryKey).buid();

    @Before
    public void setUp() throws Exception {
        when(contextFactory.newContext(eq(entity), optionsCaptor.capture())).thenReturn(context);
        when(configContext.getDefaultReadConsistencyLevel()).thenReturn(ConsistencyLevel.EACH_QUORUM);
        when(meta.getIdMeta()).thenReturn(idMeta);
        manager = new PersistenceManager(entityMetaMap, contextFactory, daoContext, configContext);
        // Spy so that individual tests can stub intercept(...) away.
        manager = Mockito.spy(this.manager);
        Whitebox.setInternalState(manager, EntityProxifier.class, proxifier);
        Whitebox.setInternalState(manager, EntityValidator.class, entityValidator);
        Whitebox.setInternalState(manager, SliceQueryExecutor.class, sliceQueryExecutor);
        Whitebox.setInternalState(manager, TypedQueryValidator.class, typedQueryValidator);
        Whitebox.setInternalState(manager, PersistenceContextFactory.class, contextFactory);
        manager.setEntityMetaMap(entityMetaMap);
        // NOTE(review): entityMetaMap is a mock, so this put() is a no-op;
        // tests that need lookups stub containsKey()/get() explicitly.
        entityMetaMap.put(CompleteBean.class, meta);
    }

    @Test
    public void should_persist() throws Exception {
        when(proxifier.isProxy(entity)).thenReturn(false);
        // (duplicate doNothing() stub removed — one stubbing is sufficient)
        Mockito.doNothing().when(manager).intercept(Matchers.anyObject(), any(info.archinnov.achilles.interceptor.Event.class));
        manager.persist(entity);
        verify(entityValidator).validateEntity(entity, entityMetaMap);
        verify(context).persist();
    }

    @Test
    public void should_persist_with_options() throws Exception {
        when(proxifier.isProxy(entity)).thenReturn(false);
        manager.persist(entity, OptionsBuilder.withConsistency(EACH_QUORUM).withTtl(150).withTimestamp(100L));
        verify(entityValidator).validateEntity(entity, entityMetaMap);
        verify(context).persist();
        Options value = optionsCaptor.getValue();
        assertThat(value.getConsistencyLevel().get()).isEqualTo(EACH_QUORUM);
        assertThat(value.getTtl().get()).isEqualTo(150);
        assertThat(value.getTimestamp().get()).isEqualTo(100L);
    }

    @Test
    public void should_exception_trying_to_persist_a_managed_entity() throws Exception {
        when(proxifier.isProxy(entity)).thenReturn(true);
        Mockito.doNothing().when(manager).intercept(Matchers.anyObject(), any(info.archinnov.achilles.interceptor.Event.class));
        exception.expect(IllegalStateException.class);
        exception.expectMessage("Then entity is already in \'managed\' state. Please use the merge() method instead of persist()");
        manager.persist(entity);
    }

    @Test
    public void should_merge() throws Exception {
        when(context.merge(entity)).thenReturn(entity);
        Mockito.doNothing().when(manager).intercept(Matchers.anyObject(), any(info.archinnov.achilles.interceptor.Event.class));
        CompleteBean mergedEntity = manager.merge(entity);
        verify(entityValidator).validateEntity(entity, entityMetaMap);
        assertThat(mergedEntity).isSameAs(entity);
        Options options = optionsCaptor.getValue();
        assertThat(options.getConsistencyLevel().isPresent()).isFalse();
        assertThat(options.getTtl().isPresent()).isFalse();
        assertThat(options.getTimestamp().isPresent()).isFalse();
    }

    @Test
    public void should_merge_with_options() throws Exception {
        when(context.merge(entity)).thenReturn(entity);
        CompleteBean mergedEntity = manager.merge(entity, OptionsBuilder.withConsistency(EACH_QUORUM).withTtl(150).withTimestamp(100L));
        verify(entityValidator).validateEntity(entity, entityMetaMap);
        assertThat(mergedEntity).isSameAs(entity);
        Options options = optionsCaptor.getValue();
        assertThat(options.getConsistencyLevel().get()).isEqualTo(EACH_QUORUM);
        assertThat(options.getTtl().get()).isEqualTo(150);
        assertThat(options.getTimestamp().get()).isEqualTo(100L);
    }

    @Test
    public void should_remove() throws Exception {
        when(proxifier.getRealObject(entity)).thenReturn(entity);
        Mockito.doNothing().when(manager).intercept(Matchers.anyObject(), any(info.archinnov.achilles.interceptor.Event.class));
        manager.remove(entity);
        verify(entityValidator).validateEntity(entity, entityMetaMap);
        Options options = optionsCaptor.getValue();
        assertThat(options.getConsistencyLevel().isPresent()).isFalse();
        assertThat(options.getTtl().isPresent()).isFalse();
        assertThat(options.getTimestamp().isPresent()).isFalse();
    }

    @Test
    public void should_remove_with_consistency() throws Exception {
        when(proxifier.getRealObject(entity)).thenReturn(entity);
        manager.remove(entity, OptionsBuilder.withConsistency(EACH_QUORUM));
        verify(entityValidator).validateEntity(entity, entityMetaMap);
        Options options = optionsCaptor.getValue();
        assertThat(options.getConsistencyLevel().get()).isSameAs(EACH_QUORUM);
        assertThat(options.getTtl().isPresent()).isFalse();
        assertThat(options.getTimestamp().isPresent()).isFalse();
    }

    @Test
    public void should_remove_by_id() throws Exception {
        when(contextFactory.newContext(CompleteBean.class, primaryKey, OptionsBuilder.noOptions())).thenReturn(context);
        when(context.getIdMeta()).thenReturn(idMeta);
        manager.removeById(CompleteBean.class, primaryKey);
        verify(entityValidator).validatePrimaryKey(idMeta, primaryKey);
        verify(context).remove();
    }

    @Test
    public void should_remove_by_id_with_consistency() throws Exception {
        when(contextFactory.newContext(eq(CompleteBean.class), eq(primaryKey), optionsCaptor.capture())).thenReturn(context);
        when(context.getIdMeta()).thenReturn(idMeta);
        manager.removeById(CompleteBean.class, primaryKey, LOCAL_QUORUM);
        verify(entityValidator).validatePrimaryKey(idMeta, primaryKey);
        verify(context).remove();
        Options options = optionsCaptor.getValue();
        assertThat(options.getConsistencyLevel().get()).isSameAs(LOCAL_QUORUM);
        assertThat(options.getTtl().isPresent()).isFalse();
        assertThat(options.getTimestamp().isPresent()).isFalse();
    }

    @Test
    public void should_find() throws Exception {
        when(contextFactory.newContext(eq(CompleteBean.class), eq(primaryKey), optionsCaptor.capture())).thenReturn(context);
        when(context.find(CompleteBean.class)).thenReturn(entity);
        PropertyMeta idMeta = new PropertyMeta();
        when(context.getIdMeta()).thenReturn(idMeta);
        when(entityMetaMap.containsKey(CompleteBean.class)).thenReturn(true);
        Mockito.doNothing().when(manager).intercept(Matchers.anyObject(), any(info.archinnov.achilles.interceptor.Event.class));
        CompleteBean bean = manager.find(CompleteBean.class, primaryKey);
        verify(entityValidator).validatePrimaryKey(idMeta, primaryKey);
        assertThat(bean).isSameAs(entity);
        Options options = optionsCaptor.getValue();
        assertThat(options.getConsistencyLevel().isPresent()).isFalse();
        assertThat(options.getTtl().isPresent()).isFalse();
        assertThat(options.getTimestamp().isPresent()).isFalse();
    }

    @Test
    public void should_find_with_consistency() throws Exception {
        when(contextFactory.newContext(eq(CompleteBean.class), eq(primaryKey), optionsCaptor.capture())).thenReturn(context);
        when(context.find(CompleteBean.class)).thenReturn(entity);
        when(context.getIdMeta()).thenReturn(idMeta);
        when(entityMetaMap.containsKey(CompleteBean.class)).thenReturn(true);
        CompleteBean bean = manager.find(CompleteBean.class, primaryKey, EACH_QUORUM);
        verify(entityValidator).validatePrimaryKey(idMeta, primaryKey);
        assertThat(bean).isSameAs(entity);
        Options options = optionsCaptor.getValue();
        assertThat(options.getConsistencyLevel().get()).isSameAs(EACH_QUORUM);
        assertThat(options.getTtl().isPresent()).isFalse();
        assertThat(options.getTimestamp().isPresent()).isFalse();
    }

    @Test
    public void should_get_reference() throws Exception {
        when(contextFactory.newContext(eq(CompleteBean.class), eq(primaryKey), optionsCaptor.capture())).thenReturn(context);
        when(context.getReference(CompleteBean.class)).thenReturn(entity);
        when(context.getIdMeta()).thenReturn(idMeta);
        when(entityMetaMap.containsKey(CompleteBean.class)).thenReturn(true);
        Mockito.doNothing().when(manager).intercept(Matchers.anyObject(), any(info.archinnov.achilles.interceptor.Event.class));
        CompleteBean bean = manager.getReference(CompleteBean.class, primaryKey);
        verify(entityValidator).validatePrimaryKey(idMeta, primaryKey);
        assertThat(bean).isSameAs(entity);
        Options options = optionsCaptor.getValue();
        assertThat(options.getConsistencyLevel().isPresent()).isFalse();
        assertThat(options.getTtl().isPresent()).isFalse();
        assertThat(options.getTimestamp().isPresent()).isFalse();
    }

    @Test
    public void should_get_reference_with_consistency() throws Exception {
        when(contextFactory.newContext(eq(CompleteBean.class), eq(primaryKey), optionsCaptor.capture())).thenReturn(context);
        when(context.getReference(CompleteBean.class)).thenReturn(entity);
        when(context.getIdMeta()).thenReturn(idMeta);
        when(entityMetaMap.containsKey(CompleteBean.class)).thenReturn(true);
        Mockito.doNothing().when(manager).intercept(Matchers.anyObject(), any(info.archinnov.achilles.interceptor.Event.class));
        CompleteBean bean = manager.getReference(CompleteBean.class, primaryKey, EACH_QUORUM);
        verify(entityValidator).validatePrimaryKey(idMeta, primaryKey);
        assertThat(bean).isSameAs(entity);
        Options options = optionsCaptor.getValue();
        assertThat(options.getConsistencyLevel().get()).isSameAs(EACH_QUORUM);
        assertThat(options.getTtl().isPresent()).isFalse();
        assertThat(options.getTimestamp().isPresent()).isFalse();
    }

    @Test
    public void should_refresh() throws Exception {
        Mockito.doNothing().when(manager).intercept(Matchers.anyObject(), any(info.archinnov.achilles.interceptor.Event.class));
        manager.refresh(entity);
        verify(entityValidator).validateEntity(entity, entityMetaMap);
        verify(proxifier).ensureProxy(entity);
        verify(context).refresh();
        Options options = optionsCaptor.getValue();
        assertThat(options.getConsistencyLevel().isPresent()).isFalse();
        assertThat(options.getTtl().isPresent()).isFalse();
        assertThat(options.getTimestamp().isPresent()).isFalse();
    }

    @Test
    public void should_refresh_with_consistency() throws Exception {
        manager.refresh(entity, EACH_QUORUM);
        verify(entityValidator).validateEntity(entity, entityMetaMap);
        verify(proxifier).ensureProxy(entity);
        verify(context).refresh();
        Options options = optionsCaptor.getValue();
        assertThat(options.getConsistencyLevel().get()).isSameAs(EACH_QUORUM);
        assertThat(options.getTtl().isPresent()).isFalse();
        assertThat(options.getTimestamp().isPresent()).isFalse();
    }

    @Test
    public void should_initialize_entity() throws Exception {
        when(context.initialize(entity)).thenReturn(entity);
        CompleteBean actual = manager.initialize(entity);
        verify(proxifier).ensureProxy(entity);
        assertThat(actual).isSameAs(entity);
        Options options = optionsCaptor.getValue();
        assertThat(options.getConsistencyLevel().isPresent()).isFalse();
        assertThat(options.getTtl().isPresent()).isFalse();
        assertThat(options.getTimestamp().isPresent()).isFalse();
    }

    @Test
    public void should_initialize_list_of_entities() throws Exception {
        List<CompleteBean> entities = Arrays.asList(entity);
        when(context.initialize(entity)).thenReturn(entity);
        List<CompleteBean> actual = manager.initialize(entities);
        assertThat(actual).containsExactly(entity);
    }

    @Test
    public void should_initialize_set_of_entities() throws Exception {
        Set<CompleteBean> entities = Sets.newHashSet(entity);
        when(context.initialize(entity)).thenReturn(entity);
        Set<CompleteBean> actual = manager.initialize(entities);
        assertThat(actual).containsExactly(entity);
    }

    @Test
    public void should_unwrap_entity() throws Exception {
        when(proxifier.unwrap(entity)).thenReturn(entity);
        CompleteBean actual = manager.unwrap(entity);
        assertThat(actual).isSameAs(entity);
    }

    @Test
    public void should_unwrap_list_of_entity() throws Exception {
        List<CompleteBean> proxies = new ArrayList<CompleteBean>();
        when(proxifier.unwrap(proxies)).thenReturn(proxies);
        List<CompleteBean> actual = manager.unwrap(proxies);
        assertThat(actual).isSameAs(proxies);
    }

    @Test
    public void should_unwrap_set_of_entity() throws Exception {
        Set<CompleteBean> proxies = new HashSet<CompleteBean>();
        when(proxifier.unwrap(proxies)).thenReturn(proxies);
        Set<CompleteBean> actual = manager.unwrap(proxies);
        assertThat(actual).isSameAs(proxies);
    }

    @Test
    public void should_init_and_unwrap_entity() throws Exception {
        when(context.initialize(entity)).thenReturn(entity);
        when(proxifier.unwrap(entity)).thenReturn(entity);
        CompleteBean actual = manager.initAndUnwrap(entity);
        assertThat(actual).isSameAs(entity);
    }

    @Test
    public void should_init_and_unwrap_list_of_entities() throws Exception {
        List<CompleteBean> entities = Arrays.asList(entity);
        when(context.initialize(entities)).thenReturn(entities);
        when(proxifier.unwrap(entities)).thenReturn(entities);
        List<CompleteBean> actual = manager.initAndUnwrap(entities);
        assertThat(actual).isSameAs(entities);
    }

    @Test
    public void should_init_and_unwrap_set_of_entities() throws Exception {
        Set<CompleteBean> entities = Sets.newHashSet(entity);
        when(context.initialize(entities)).thenReturn(entities);
        when(proxifier.unwrap(entities)).thenReturn(entities);
        Set<CompleteBean> actual = manager.initAndUnwrap(entities);
        assertThat(actual).isSameAs(entities);
    }

    @Test
    public void should_return_slice_query_builder() throws Exception {
        when(entityMetaMap.get(CompleteBean.class)).thenReturn(meta);
        when(meta.isClusteredEntity()).thenReturn(true);
        SliceQueryBuilder<CompleteBean> builder = manager.sliceQuery(CompleteBean.class);
        assertThat(Whitebox.getInternalState(builder, SliceQueryExecutor.class)).isSameAs(sliceQueryExecutor);
        assertThat(Whitebox.getInternalState(builder, EntityMeta.class)).isSameAs(meta);
        assertThat(Whitebox.getInternalState(builder, PropertyMeta.class)).isSameAs(idMeta);
    }

    @Test
    public void should_return_native_query_builder() throws Exception {
        NativeQueryBuilder builder = manager.nativeQuery("queryString");
        assertThat(builder).isNotNull();
        assertThat(Whitebox.getInternalState(builder, DaoContext.class)).isSameAs(daoContext);
        assertThat(Whitebox.getInternalState(builder, String.class)).isEqualTo("queryString");
    }

    @Test
    public void should_return_typed_query_builder() throws Exception {
        when(entityMetaMap.containsKey(CompleteBean.class)).thenReturn(true);
        when(entityMetaMap.get(CompleteBean.class)).thenReturn(meta);
        when(meta.getPropertyMetas()).thenReturn(new HashMap<String, PropertyMeta>());
        TypedQueryBuilder<CompleteBean> builder = manager.typedQuery(CompleteBean.class, "queryString");
        assertThat(builder).isNotNull();
        verify(typedQueryValidator).validateTypedQuery(CompleteBean.class, "queryString", meta);
        assertThat(Whitebox.getInternalState(builder, DaoContext.class)).isSameAs(daoContext);
        assertThat(Whitebox.getInternalState(builder, EntityMeta.class)).isSameAs(meta);
        assertThat(Whitebox.getInternalState(builder, PersistenceContextFactory.class)).isSameAs(contextFactory);
        // The builder normalizes the query to lower case internally.
        assertThat(Whitebox.getInternalState(builder, String.class)).isEqualTo("querystring");
    }

    @Test
    public void should_return_raw_typed_query_builder() throws Exception {
        when(entityMetaMap.containsKey(CompleteBean.class)).thenReturn(true);
        when(entityMetaMap.get(CompleteBean.class)).thenReturn(meta);
        when(meta.getPropertyMetas()).thenReturn(new HashMap<String, PropertyMeta>());
        TypedQueryBuilder<CompleteBean> builder = manager.rawTypedQuery(CompleteBean.class, "queryString");
        assertThat(builder).isNotNull();
        verify(typedQueryValidator).validateRawTypedQuery(CompleteBean.class, "queryString", meta);
        assertThat(Whitebox.getInternalState(builder, DaoContext.class)).isSameAs(daoContext);
        assertThat(Whitebox.getInternalState(builder, EntityMeta.class)).isSameAs(meta);
        assertThat(Whitebox.getInternalState(builder, PersistenceContextFactory.class)).isSameAs(contextFactory);
        assertThat(Whitebox.getInternalState(builder, String.class)).isEqualTo("querystring");
    }

    @Test
    public void should_get_native_session() throws Exception {
        Session session = mock(Session.class);
        when(daoContext.getSession()).thenReturn(session);
        Session actual = manager.getNativeSession();
        assertThat(actual).isSameAs(session);
    }

    @Test
    public void should_get_indexed_query() throws Exception {
        when(entityMetaMap.get(CompleteBean.class)).thenReturn(meta);
        when(entityMetaMap.containsKey(CompleteBean.class)).thenReturn(true);
        when(meta.isClusteredEntity()).thenReturn(false);
        when(meta.getTableName()).thenReturn("table");
        TypedQueryBuilder<CompleteBean> typedQueryBuilder = manager.indexedQuery(CompleteBean.class, new IndexCondition("column", "value"));
        assertThat(Whitebox.<Object[]>getInternalState(typedQueryBuilder, "boundValues")).contains("value");
        verify(typedQueryValidator).validateTypedQuery(CompleteBean.class, "SELECT * FROM table WHERE column=?;", meta);
    }
}
|
minimus/final-task
|
client-src/containers/search/index.js
|
// Re-export the container's default export so consumers can write
// `import SearchContainer from '.../containers/search'`.
// Uses the standard `export { default } from` form: the original
// `export default from` is stage-1 proposal syntax that requires a
// dedicated Babel plugin and is not valid ECMAScript.
export { default } from './SearchContainer'
|
decilio4g/delicias-do-tchelo
|
node_modules/@styled-icons/foundation/PageCopy/PageCopy.esm.js
|
import { __assign } from "tslib";
import * as React from 'react';
import { StyledIconBase } from '@styled-icons/styled-icon';
// Auto-generated styled-icons wrapper for the Foundation "PageCopy" glyph.
// Forwards the ref to the underlying StyledIconBase SVG element; all extra
// props are spread onto the icon.
export var PageCopy = React.forwardRef(function (props, ref) {
var attrs = {
"fill": "currentColor",
"xmlns": "http://www.w3.org/2000/svg",
};
return (React.createElement(StyledIconBase, __assign({ iconAttrs: attrs, iconVerticalAlign: "middle", iconViewBox: "0 0 100 100" }, props, { ref: ref }),
React.createElement("path", { d: "M75.272 7.482h-.005v-4.02a1.73 1.73 0 00-1.73-1.73h-32.87l-25.95 25.95v58.819c0 .956.774 1.73 1.73 1.73h57.089a1.73 1.73 0 001.73-1.73v-2.448h.005l.001-76.571zM24.674 78.276V31.142h17.723a1.73 1.73 0 001.73-1.73V11.689h21.188v66.587H24.674z", key: "k0" }),
React.createElement("path", { d: "M83.77 24.857h-3.475v66.911c0 .835-.677 1.513-1.513 1.513H29.306v3.475c0 .836.677 1.513 1.513 1.513H83.77c.836 0 1.513-.677 1.513-1.513V26.37c0-.836-.677-1.513-1.513-1.513z", key: "k1" })));
});
PageCopy.displayName = 'PageCopy';
// Default rendered size in px; the path coordinates use the 100x100 viewBox.
export var PageCopyDimensions = { height: 24, width: 24 };
|
asd123freedom/san-awesome
|
src/icons/brands/sellsy.js
|
<filename>src/icons/brands/sellsy.js
import Icon from '../../components/Icon'
// Registers the "sellsy" brand glyph (generated from Font Awesome SVG data)
// with the shared Icon component under the key 'brands/sellsy'.
Icon.register({
'brands/sellsy': {
width: 640,
height: 512,
paths: [
{
d: 'M539.7 237.3c55.2 13.5 94.1 63.1 94.1 119.5 0 68-55.5 123.2-123.2 123.2h-381.2c-67.7 0-123.2-55.2-123.2-123.2 0-47.5 27.9-91 70.8-111.2-2.1-7.4-3.1-15-3.1-22.4 0-46.3 37.7-84 84-84 20.2 0 39.8 7.4 55.2 20.5 18.4-74.8 85.8-127.8 163-127.8 92.5 0 167.9 75.4 167.9 167.9 0 12.6-1.2 25.1-4.3 37.4zM199.9 401.6v-110.9c0-8.3-7-15.3-15.3-15.3h-30.9c-8.3 0-15.3 7-15.3 15.3v110.9c0 8.3 7 15.3 15.3 15.3h30.9c8.3 0 15.3-7 15.3-15.3zM289.4 401.6v-131.5c0-8.3-7-15.3-15.3-15.3h-30.9c-8.3 0-15.3 7-15.3 15.3v131.5c0 8.3 7 15.3 15.3 15.3h30.9c8.3 0 15.3-7 15.3-15.3zM378.8 401.6v-162.7c0-8.3-7-15.3-15.3-15.3h-30.9c-8.3 0-15.3 7-15.3 15.3v162.7c0 8.3 7 15.3 15.3 15.3h30.9c8.3 0 15.3-7 15.3-15.3zM465.9 401.6v-224.6c0-8.6-7-15.6-15.3-15.6h-28.5c-8.3 0-15.3 7-15.3 15.6v224.6c0 8.3 7 15.3 15.3 15.3h28.5c8.3 0 15.3-7 15.3-15.3z'
}
]
}
})
|
dpt/PrivateEye
|
libs/appengine/wimp/menu/tick-exclusive.c
|
<filename>libs/appengine/wimp/menu/tick-exclusive.c
#include "oslib/wimp.h"
#include "appengine/wimp/menu.h"
/* Tick exactly one entry of the given menu, clearing wimp_MENU_TICKED on
 * every other entry. Walks the entry array up to and including the entry
 * whose flags carry wimp_MENU_LAST. */
void menu_tick_exclusive(wimp_menu *menu, int entry_to_tick)
{
  wimp_menu_entry *entry  = menu->entries;
  wimp_menu_entry *target = menu->entries + entry_to_tick;
  int              done   = 0;

  while (!done)
  {
    if (entry == target)
      entry->menu_flags |= wimp_MENU_TICKED;
    else
      entry->menu_flags &= ~wimp_MENU_TICKED;

    done = (entry->menu_flags & wimp_MENU_LAST) != 0;
    entry++;
  }
}
/* As menu_tick_exclusive, but only entries with indices in [low, high]
 * (inclusive) are touched; entries outside the range keep their current
 * tick state. If entry_to_tick lies outside the range, the whole range is
 * simply untick-ed. */
void menu_range_tick_exclusive(wimp_menu *menu,
                               int entry_to_tick,
                               int low,
                               int high)
{
  int i;

  for (i = low; i <= high; i++)
  {
    wimp_menu_entry *entry = menu->entries + i;

    if (i == entry_to_tick)
      entry->menu_flags |= wimp_MENU_TICKED;
    else
      entry->menu_flags &= ~wimp_MENU_TICKED;
  }
}
|
cablelabs/Utopia
|
source/igd/src/igd_platform_independent_inf.c
|
<reponame>cablelabs/Utopia
/*
* If not stated otherwise in this file or this component's Licenses.txt file the
* following copyright and licenses apply:
*
* Copyright 2015 RDK Management
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**********************************************************************
Copyright [2014] [Cisco Systems, Inc.]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**********************************************************************/
/* Copyright (c) 2008-2009 Cisco Systems, Inc. All rights reserved.
*
* Cisco Systems, Inc. retains all right, title and interest (including all
* intellectual property rights) in and to this computer program, which is
* protected by applicable intellectual property laws. Unless you have obtained
* a separate written license from Cisco Systems, Inc., you are not authorized
* to utilize all or a part of this computer program for any purpose (including
* reproduction, distribution, modification, and compilation into object code),
* and you must immediately destroy or return to Cisco Systems, Inc. all copies
* of this computer program. If you are licensed by Cisco Systems, Inc., your
* rights to utilize this computer program are limited by the terms of that
* license. To obtain a license, please contact Cisco Systems, Inc.
*
* This computer program contains trade secrets owned by Cisco Systems, Inc.
* and, unless unauthorized by Cisco Systems, Inc. in writing, you agree to
* maintain the confidentiality of this computer program and related information
* and to not disclose this computer program and related information to any
* other person or entity.
*
* THIS COMPUTER PROGRAM IS PROVIDED AS IS WITHOUT ANY WARRANTIES, AND CISCO
* SYSTEMS, INC. EXPRESSLY DISCLAIMS ALL WARRANTIES, EXPRESS OR IMPLIED,
* INCLUDING THE WARRANTIES OF MERCHANTIBILITY, FITNESS FOR A PARTICULAR
* PURPOSE, TITLE, AND NONINFRINGEMENT.
*
*
* FileName: igd_platform_independent_inf.c
* Author: <NAME>(<EMAIL>) <NAME>(<EMAIL>)
* Jianrong(<EMAIL>)<NAME>(<EMAIL>)
* Date: 2009-05-03
* Description: Implementation including all Product-related functions
*****************************************************************************/
/*$Id: igd_platform_independent_inf.c,v 1.15 2009/05/27 03:18:47 zlipin Exp $
*
*$Log: igd_platform_independent_inf.c,v $
*Revision 1.15 2009/05/27 03:18:47 zlipin
*portmapping updated.
*
*Revision 1.14 2009/05/27 03:08:43 tahong
*delete stub code in
*IGD_pii_request_connection() and IGD_pii_force_termination()
*
*Revision 1.13 2009/05/26 09:59:55 zhangli
*Completed the cleanup activity
*
*Revision 1.11 2009/05/22 05:43:49 zlipin
*Get rid of two useless function.
*
*Revision 1.10 2009/05/22 05:39:13 zlipin
*Adjust the PII "PortMapping" module interface
*
*Revision 1.9 2009/05/21 07:58:27 zhihliu
*update PII interface
*
*Revision 1.8 2009/05/21 06:26:51 jianxiao
*add IGD_pii_get_wan_device_number/IGD_pii_get_wan_connection_device_number interface
*
*Revision 1.7 2009/05/15 08:00:21 bowan
*1st Integration
*
*Revision 1.5 2009/05/14 02:39:57 jianxiao
*Modify the interface of PAL_xml_node_GetFirstbyName
*
*Revision 1.4 2009/05/14 02:07:49 tahong
*in the comment, "pii.c"===> "igd_platform_independent_inf.c"
*
*Revision 1.3 2009/05/14 02:05:03 jianxiao
*Add the function IGD_pii_get_uuid
*
*Revision 1.2 2009/05/14 01:45:35 jianxiao
*Add the functions for WANCommonInterfaceConfig and WANEthernetLinkConfig
*
*Revision 1.1 2009/05/13 08:57:08 tahong
*create orignal version
*
*
**/
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
#include <linux/sockios.h>
#include <net/if.h>
#include <netinet/in.h>
#include <sys/ioctl.h>
#include <sys/socket.h>
#include <utctx/utctx_api.h>
#include <utapi/utapi.h>
#include <ccsp_syslog.h>
#include "pal_log.h"
#include "igd_platform_independent_inf.h"
/************************************************************
* Function: IGD_pii_get_serial_number
*
* Parameters:
* NONE
*
* Description:
* Get the serial number of the your product.
* It will be used in the description file of the IGD device
*
* Return Values: CHAR*
* The serial number of the IGD. NULL if failure.
************************************************************/
CHAR* IGD_pii_get_serial_number(VOID)
{
    /* Backing storage for the serial number. It stays empty until an OEM
     * implementation fills it in (see TODO below); the returned pointer is
     * static, so callers must not free it. */
    static char serial_number[128] = {'\0'};

    /* TODO: to be implemented by OEM
       ProductionDb_RetrieveAccess();
       ProdDb_GetSerialNumber(serial_number); */

    return serial_number;
    //return "123456789001";
}
/************************************************************
* Function: IGD_pii_get_uuid
*
* Parameters:
* uuid: OUT. The UUID of one new device.
*
* Description:
* Get the UUID for one new device.
*
* According to the UNnP spec, the different device MUST have the different
* UUID. Our IGD stack will call this function to get one new UUID when
* create one new device. That means, this function MUST return the different
* UUID when it is called every time. And one method to create UUID is provided
* in the "igd_platform_independent_inf.c".
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
// One method to create UUID for the different device
// It uses the MAC address of the interface that UNnP IGD run on as the input
#define PII_MAC_ADDRESS_LEN 6   /* bytes in an Ethernet hardware address */
#define PII_IF_NAME_LEN 16      /* historical; matches IFNAMSIZ (incl. NUL) */
#define WAN_UUID_INDEX_NUM 26   /* offset of the UUID digit mutated per device */
/************************************************************
 * Function: _pii_get_if_MacAddress
 *
 * Parameters:
 *     ifName:     IN. Network interface name (e.g. the LAN bridge).
 *     MacAddress: OUT. Receives the 6-byte hardware address.
 *
 * Description:
 *     Query the MAC address of ifName via the SIOCGIFHWADDR ioctl on a
 *     throwaway raw socket.
 *
 * Return Values: INT32
 *     0 on success, -1 on failure (NULL name, socket or ioctl error).
 ************************************************************/
LOCAL INT32 _pii_get_if_MacAddress(IN const CHAR *ifName, INOUT CHAR MacAddress[PII_MAC_ADDRESS_LEN])
{
    struct ifreq ifr;
    INT32 fd;
    INT32 ret = -1;

    if(NULL == ifName)
        return ret;

    if((fd = socket(PF_INET, SOCK_RAW, IPPROTO_RAW)) == -1)
        return ret;

    /* Zero the request and copy the name with guaranteed NUL-termination:
       the previous strncpy(..., PII_IF_NAME_LEN) could leave ifr_name
       unterminated for a name of exactly IFNAMSIZ characters, handing the
       kernel an unterminated buffer. */
    memset(&ifr, 0, sizeof(ifr));
    strncpy(ifr.ifr_name, ifName, sizeof(ifr.ifr_name) - 1);
    ifr.ifr_addr.sa_family = AF_INET;
    /* (the redundant fd >= 0 re-check was dropped: socket() either
       returns -1 — handled above — or a valid descriptor) */
    if(ioctl(fd, SIOCGIFHWADDR, &ifr) == 0)
    {
        memcpy(MacAddress, &ifr.ifr_ifru.ifru_hwaddr.sa_data, PII_MAC_ADDRESS_LEN);
        ret = 0;
    }
    close(fd);
    return ret;
}
/* Produce one new UUID per call for newly created UPnP devices.
 * First call: build a base UUID from the LAN interface MAC address.
 * Later calls: derive a fresh UUID by bumping one character of the base.
 * Returns 0 on success, -1 on failure (NULL output or MAC lookup error). */
INT32 IGD_pii_get_uuid(INOUT CHAR *uuid)
{
    LOCAL CHAR base_uuid[UPNP_UUID_LEN_BY_VENDER];
    CHAR uuid_mac_part[PII_MAC_ADDRESS_LEN];
    LOCAL BOOL get_global_uuid_once=BOOL_FALSE;
    UtopiaContext utctx;
    /* Zero-init so _pii_get_if_MacAddress() never sees garbage if
     * Utopia_RawGet() fails to fill the buffer. */
    char igd_upnp_interface[10] = {0};

    if(uuid == NULL)
        return -1;

    if(!get_global_uuid_once)
    {
        /* NOTE(review): Utopia_Init() result is not checked here — confirm
         * the RawGet below tolerates an uninitialized context. */
        Utopia_Init(&utctx);
        Utopia_RawGet(&utctx,NULL,"lan_ifname",igd_upnp_interface,sizeof(igd_upnp_interface));
        Utopia_Free(&utctx, FALSE);
        if(_pii_get_if_MacAddress(igd_upnp_interface,uuid_mac_part))
        {
            printf("PII get MAC fail\n");
            return -1;
        }
        snprintf(base_uuid,UPNP_UUID_LEN_BY_VENDER,"uuid:ebf5a0a0-1dd1-11b2-a90f-%02x%02x%02x%02x%02x%02x",
                 (UINT8)uuid_mac_part[0],(UINT8)uuid_mac_part[1],
                 (UINT8)uuid_mac_part[2],(UINT8)uuid_mac_part[3],
                 (UINT8)uuid_mac_part[4],(UINT8)uuid_mac_part[5]);
        get_global_uuid_once = BOOL_TRUE;
    }
    else
    {
        /* Derive a new UUID by incrementing the character at
         * WAN_UUID_INDEX_NUM; wraps back to '1' once past 'f'.
         * NOTE(review): the increment can pass through non-hex characters
         * before wrapping — confirm downstream consumers tolerate that. */
        base_uuid[WAN_UUID_INDEX_NUM] = base_uuid[WAN_UUID_INDEX_NUM]+1;
        if(base_uuid[WAN_UUID_INDEX_NUM]>'f')
            base_uuid[WAN_UUID_INDEX_NUM]='1';
    }
    /* base_uuid is NUL-terminated by snprintf(), so strncpy() copies the
     * terminator as well. */
    strncpy(uuid,base_uuid, UPNP_UUID_LEN_BY_VENDER);
    return 0;
}
/************************************************************
* Function: IGD_pii_get_wan_device_number
*
* Parameters:
* NONE
*
* Description:
* Get the instance number of the WANDevice in IGD device
*
* Return Values: INT32
* The instance number of the WAN device. -1 if failure.
************************************************************/
INT32 IGD_pii_get_wan_device_number(VOID)
{
    /* This platform models exactly one WANDevice. */
    return 1;
}
/************************************************************
* Function: IGD_pii_get_lan_device_number
*
* Parameters:
* NONE
*
* Description:
* Get the instance number of the LANDevice in IGD device
*
* Return Values: INT32
* The instance number of the LAN device. -1 if failure.
************************************************************/
INT32 IGD_pii_get_lan_device_number(VOID)
{
    /* This gateway always exposes a single LANDevice instance. */
    return 1;
}
/************************************************************
* Function: IGD_pii_get_wan_connection_device_number
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice
*
* Description:
* Get the instance number of the WANConnectionDevice
* in one WANDevice specified by the input device index.
*
* Return Values: INT32
* The instance number of the WANConnectionDevice. -1 if failure.
************************************************************/
INT32 IGD_pii_get_wan_connection_device_number(IN INT32 wan_device_index)
{
    /* The index is irrelevant: every WANDevice on this platform holds
     * exactly one WANConnectionDevice. */
    (void) wan_device_index;
    return 1;
}
/************************************************************
* Function: IGD_pii_get_wan_ppp_service_number
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice
*
* Description:
* Get the instance number of the WANPPPConnectionService
* in one WANConnectionDevice specified by the input device index
*
* Return Values: INT32
* The instance number of WANPPPConnectionService, -1 if failure.
************************************************************/
INT32 IGD_pii_get_wan_ppp_service_number(IN INT32 WanDeviceIndex,
                                         IN INT32 WanConnectionDeviceIndex)
{
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    /* USGv2 has no PPP WAN connection, so no WANPPPConnection services
     * are exposed. */
    return 0;
}
/************************************************************
* Function: IGD_pii_get_wan_ip_service_number
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice
*
* Description:
* Get the instance number of the WANIPConnectionService
* in one WANConnectionDevice specified by the input device index
*
* Return Values: INT32
* The instance number of WANIPConnectionService, -1 if failure.
************************************************************/
INT32 IGD_pii_get_wan_ip_service_number(IN INT32 WanDeviceIndex,
                                        IN INT32 WanConnectionDeviceIndex)
{
    /* One WANIPConnection service per WANConnectionDevice, regardless of
     * the indices supplied. */
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    return 1;
}
/************************************************************
* Function: IGD_pii_get_possible_connection_types
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice.
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService minimum value is 1.
* ServiceType: IN. Type of WANXXXXConnection.
* ConnectionTypesList: OUT. List of possible connection types, a comma-separated
* string.One example for WANIPConnection is "Unconfigured,IP_Routed,IP_Bridged".
*
* Description:
 * Get the list of possible connection types of one WAN(IP/PPP)ConnectionService
* specified by the input device index and service type
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
/* Last connection type set through IGD_pii_set_connection_type(). */
LOCAL CHAR ipconntype[32] = {0};
#define IPCONNTYPELIST "Unconfigured,IP_Routed"

INT32 IGD_pii_get_possible_connection_types(IN INT32 WanDeviceIndex,
                                            IN INT32 WanConnectionDeviceIndex,
                                            IN INT32 WanConnectionServiceIndex,
                                            IN INT32 ServiceType,
                                            OUT CHAR *ConnectionTypesList)
{
    /* The supported set is fixed for every WAN connection service. */
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    (void) WanConnectionServiceIndex;
    (void) ServiceType;

    if (ConnectionTypesList == NULL) {
        return 0;
    }
    strcpy(ConnectionTypesList, IPCONNTYPELIST);
    return 0;
}
/************************************************************
* Function: IGD_pii_get_connection_status
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN connection service.
* ConnectionType: OUT. Current connection status.
*
* Description:
* Get the current connection status of one WAN(IP/PPP)ConnectionService
* specified by the input device index and service type
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
/* Report the current WAN connection state in the UPnP ConnectionStatus
 * vocabulary. Returns 0 on success, 1 when utopia access fails. */
INT32 IGD_pii_get_connection_status(IN INT32 WanDeviceIndex,
                                    IN INT32 WanConnectionDeviceIndex,
                                    IN INT32 WanConnectionServiceIndex,
                                    IN INT32 ServiceType,
                                    OUT CHAR *ConnectionStatus)
{
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    (void) WanConnectionServiceIndex;
    (void) ServiceType;
    /*
     * TODO: verify if these requests need to throttled to
     * avoid too many sysevent requests?
     */
    wanConnectionStatus_t wan;
    const char *status;
    UtopiaContext ctx;

    // PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    bzero(&wan, sizeof(wanConnectionStatus_t));
    if (!Utopia_Init(&ctx)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting utctx object", __FUNCTION__);
        return 1;
    }
    if (SUCCESS != Utopia_GetWANConnectionStatus(&ctx, &wan)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting wan connection info", __FUNCTION__);
        Utopia_Free(&ctx, 0);
        return 1;
    }
    Utopia_Free(&ctx, 0);

    /* Map the utopia WAN state onto the UPnP status strings. */
    switch (wan.status) {
    case WAN_CONNECTED:     status = "Connected";     break;
    case WAN_CONNECTING:    status = "Connecting";    break;
    case WAN_DISCONNECTING: status = "Disconnecting"; break;
    case WAN_DISCONNECTED:  status = "Disconnected";  break;
    default:                status = "Unconfigured";  break;
    }
    strcpy(ConnectionStatus, status);
    return 0;
}
/************************************************************
* Function: IGD_pii_get_connection_type
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
 *     WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN connection service.
* ConnectionType: OUT. Current connection type.
*
* Description:
* Get the current connection type of one WAN(IP/PPP)ConnectionService
* specified by the input device index and service type
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
INT32 IGD_pii_get_connection_type(IN INT32 WanDeviceIndex,
                                  IN INT32 WanConnectionDeviceIndex,
                                  IN INT32 WanConnectionServiceIndex,
                                  IN INT32 ServiceType,
                                  OUT CHAR *ConnectionType)
{
    /* Only routed mode is supported, so report a constant type. */
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    (void) WanConnectionServiceIndex;
    (void) ServiceType;
    strcpy(ConnectionType, "IP_Routed");
    return 0;
}
/************************************************************
* Function: IGD_pii_set_connection_type
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN(IP/PPP)connectionService.
* ConnectionType: IN. The connection type that will be set.
*
* Description:
* Set the current connection type of one WAN(IP/PPP)connectionService
* specified by the input device index and service type
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
/* Record the connection type requested by a UPnP client.
 * The value is only stored locally (ipconntype); nothing is reconfigured.
 * Returns 0 on success, 1 when ConnType is NULL. */
INT32 IGD_pii_set_connection_type(IN INT32 WanDeviceIndex,
                                  IN INT32 WanConnectionDeviceIndex,
                                  IN INT32 WanConnectionServiceIndex,
                                  IN INT32 ServiceType,
                                  IN CHAR *ConnType)
{
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    (void) WanConnectionServiceIndex;
    (void) ServiceType;

    if (ConnType == NULL) {
        return 1;
    }
    /* ConnType originates from a UPnP SetConnectionType request (untrusted
     * network input); bound the copy so it cannot overflow ipconntype[32]. */
    strncpy(ipconntype, ConnType, sizeof(ipconntype) - 1);
    ipconntype[sizeof(ipconntype) - 1] = '\0';
    return 0;
}
/************************************************************
* Function: IGD_pii_request_connection
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN(IP/PPP)connectionService.
* ConnectionType: IN. The connection type that will be set.
*
* Description:
* Request to initiate the connection of WAN(IP/PPP)connectionService
* specified by the input device index and service type
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
INT32 IGD_pii_request_connection(IN INT32 WanDeviceIndex,
                                 IN INT32 WanConnectionDeviceIndex,
                                 IN INT32 WanConnectionServiceIndex,
                                 IN INT32 ServiceType)
{
    /* Connection setup is handled by the platform itself; the UPnP request
     * is accepted as a no-op. */
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    (void) WanConnectionServiceIndex;
    (void) ServiceType;
    return 0;
}
/************************************************************
* Function: IGD_pii_force_termination
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN(IP/PPP)connectionService.
* ConnectionType: IN. The connection type that will be set.
*
* Description:
* Force to terminate the connection of WAN(IP/PPP)connectionService
* specified by the input device index and service type
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
/* Force-terminate the WAN connection, if administration policy allows it.
 * Returns 0 on success, 1 when termination is disallowed or fails. */
INT32 IGD_pii_force_termination(IN INT32 WanDeviceIndex,
                                IN INT32 WanConnectionDeviceIndex,
                                IN INT32 WanConnectionServiceIndex,
                                IN INT32 ServiceType)
{
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    (void) WanConnectionServiceIndex;
    (void) ServiceType;

    UtopiaContext utctx;
    int allowed = 0;

    /* Check whether the administrator permits IGD-initiated disconnects. */
    if (Utopia_Init(&utctx)) {
        PAL_LOG("igd_platform", "debug", "%s: Lock acquired ", __FUNCTION__);
        allowed = Utopia_IGDInternetDisbleAllowed(&utctx);
        Utopia_Free(&utctx, 0);
        PAL_LOG("igd_platform", "debug", "%s: Lock released ", __FUNCTION__);
    }
    if (!allowed) {
        PAL_LOG("igd_platform", "debug", "%s: IGD force-termination is not allowed, return action error", __FUNCTION__);
        return 1;
    }
    if (SUCCESS != Utopia_WANConnectionTerminate()) {
        PAL_LOG("igd_platform", "debug", "%s: Error terminating wan connection ", __FUNCTION__);
        return 1;
    }
    return 0;
}
/************************************************************
* Function: IGD_pii_get_external_ip
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN(IP/PPP)connectionService.
* ExternalIp: OUT. External IP address in string format.
*
* Description:
* Get current external IP address used by NAT for the connection of WAN(IP/PPP)connectionService
* specified by the input device index and service type
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
/* Copy the external (WAN) IPv4 address, as a dotted-quad string, into
 * ExternalIp (caller buffer, assumed >= IPV4_ADDR_LEN bytes).
 * Returns 0 on success, 1 when utopia access fails. */
INT32 IGD_pii_get_external_ip(IN INT32 WanDeviceIndex,
                              IN INT32 WanConnectionDeviceIndex,
                              IN INT32 WanConnectionServiceIndex,
                              IN INT32 ServiceType,
                              OUT CHAR *ExternalIp)
{
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    (void) WanConnectionServiceIndex;
    (void) ServiceType;
    wanConnectionStatus_t wan;
    // PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    bzero(&wan, sizeof(wanConnectionStatus_t));
    UtopiaContext ctx;
    if (!Utopia_Init(&ctx)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting utctx object", __FUNCTION__);
        return 1;
    }
    if (SUCCESS != Utopia_GetWANConnectionStatus(&ctx, &wan)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting wan connection info", __FUNCTION__);
        Utopia_Free(&ctx, 0);
        return 1;
    }
    Utopia_Free(&ctx, 0);
    /* strncpy() does not NUL-terminate when the source fills the buffer;
     * terminate explicitly so callers always get a valid C string. */
    strncpy(ExternalIp, wan.ip_address, IPV4_ADDR_LEN - 1);
    ExternalIp[IPV4_ADDR_LEN - 1] = '\0';
    return 0;
}
/************************************************************
* Function: IGD_pii_get_link_layer_max_bitrate
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN(IP/PPP)connectionService.
* UpRate: OUT. Maximum upstream bitrate, it has a static value once a connection is setup.
* DownRate: OUT. Maximum downstream bitrate, it has a static value once a connection is setup.
*
* Description:
* Get the link layer maximum bitrates(upstream and downstream) for the connection of WAN(IP/PPP)connectionService
* specified by the input device index and service type
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
INT32 IGD_pii_get_link_layer_max_bitrate(IN INT32 WanDeviceIndex,
                                         IN INT32 WanConnectionDeviceIndex,
                                         IN INT32 WanConnectionServiceIndex,
                                         IN INT32 ServiceType,
                                         OUT CHAR *UpRate,
                                         OUT CHAR *DownRate)
{
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    (void) WanConnectionServiceIndex;
    (void) ServiceType;
    /* Fixed 10 Mbit/s reported in both directions for every connection. */
    strcpy(UpRate, "10000000");
    strcpy(DownRate, "10000000");
    return 0;
}
/************************************************************
* Function: IGD_pii_get_up_time
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN(IP/PPP)connectionService.
* UpTime: OUT. The time in seconds that this connection has stayed up.
*
* Description:
* Get the time in seconds that the connection has stayed up.
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
/* Write the connection uptime (seconds) as a decimal string into UpTime.
 * Returns 0 on success, 1 when utopia access fails. */
INT32 IGD_pii_get_up_time(IN INT32 WanDeviceIndex,
                          IN INT32 WanConnectionDeviceIndex,
                          IN INT32 WanConnectionServiceIndex,
                          IN INT32 ServiceType,
                          OUT CHAR *UpTime)
{
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    (void) WanConnectionServiceIndex;
    (void) ServiceType;

    wanConnectionStatus_t wan;
    UtopiaContext ctx;

    // PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    bzero(&wan, sizeof(wanConnectionStatus_t));
    if (!Utopia_Init(&ctx)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting utctx object", __FUNCTION__);
        return 1;
    }
    if (SUCCESS != Utopia_GetWANConnectionStatus(&ctx, &wan)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting wan connection info", __FUNCTION__);
        Utopia_Free(&ctx, 0);
        return 1;
    }
    Utopia_Free(&ctx, 0);
    /* NOTE(review): "%ld" assumes wan.uptime is a long — confirm against
     * the wanConnectionStatus_t declaration. */
    sprintf(UpTime, "%ld", wan.uptime);
    return 0;
}
/************************************************************
* Function: IGD_pii_get_NAT_RSIP_status
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN(IP/PPP)connectionService.
* NATEnable: OUT. Value=1(NAT is enabled) or 0(NAT is disabled)
* RSIPAvailable: OUT. Value=1(RSIP is supported) or 0(RSIP isn't supported)
*
* Description:
* Get the current state of NAT and RSIP for the connection of WAN(IP/PPP)connectionService
* specified by the input device index and service type
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
/* Report NAT and RSIP availability. RSIP is never supported; NAT reflects
 * the utopia routing configuration (defaults to enabled when utopia is
 * unavailable). Always returns 0. */
INT32
IGD_pii_get_NAT_RSIP_status( IN INT32 WanDeviceIndex,
        IN INT32 WanConnectionDeviceIndex,
        IN INT32 WanConnectionServiceIndex,
        IN INT32 ServiceType,
        OUT BOOL *natStatus,
        OUT BOOL *rsipStatus
        )
{
    UtopiaContext ctx;
    boolean_t natEnable = BOOL_TRUE; /*RDKB-7142, CID-32964; init before use */

    /* Defaults: NAT on, RSIP unsupported. */
    *natStatus = BOOL_TRUE;
    *rsipStatus = BOOL_FALSE;
    // PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);

    if (Utopia_Init(&ctx)) {
        Utopia_GetRouteNAT(&ctx, &natEnable);
        Utopia_Free(&ctx, 0);
    }
    *natStatus = (natEnable == 1) ? BOOL_TRUE : BOOL_FALSE;

    printf("IGD_pii_get_NAT_RSIP_status is called, %d and %d is returned.\n", *natStatus, *rsipStatus);
    printf(" interface: %d-%d-%d-%d\n", WanDeviceIndex, WanConnectionDeviceIndex, WanConnectionServiceIndex, ServiceType);
    printf(" %d and %d is returned.\n", *natStatus, *rsipStatus);
    return 0;
}
/************************************************************
* Function: IGD_pii_add_portmapping_entry
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN(IP/PPP)connectionService.
* PortmappingEntry: IN. The portmapping entry to be added.
*
* Description:
* Add a new port mapping or overwrites an existing mapping with the same internal client.
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
/* Add a dynamic port mapping, or refresh an existing one for the same
 * internal client. Mappings are keyed by (RemoteHost, ExternalPort,
 * Protocol); a matching entry for a *different* internal client is an
 * error per the UPnP WANIPConnection spec.
 * Returns 0 on success, 1 on any failure (including utopia init failure). */
INT32 IGD_pii_add_portmapping_entry( IN INT32 WanDeviceIndex,
        IN INT32 WanConnectionDeviceIndex,
        IN INT32 WanConnectionServiceIndex,
        IN INT32 ServiceType,
        IN PIGD_PortMapping_Entry portmapEntry
        )
{
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    (void) WanConnectionServiceIndex;
    (void) ServiceType;
    syslog_systemlog("IGD", LOG_NOTICE, "Add Port mapping %s:%d to %s:%d",
                     portmapEntry->remoteHost, portmapEntry->externalPort,
                     portmapEntry->internalClient, portmapEntry->internalPort);
    PAL_LOG("igd_platform", "debug", "%s: desc %s, ext_port %d, int_port %d ", __FUNCTION__, portmapEntry->description, portmapEntry->externalPort, portmapEntry->internalPort);
    UtopiaContext ctx;
    /* BUGFIX: was initialized to 0, so a failed Utopia_Init() reported
     * success without adding anything. Start as failure, like the sibling
     * delete/get functions. */
    int rc = 1;
    if (Utopia_Init(&ctx)) {
        int index;
        portMapDyn_t pmap;
        protocol_t proto = (0 == strcasecmp(portmapEntry->protocol, "TCP")) ? TCP : UDP;
        PAL_LOG("igd_platform", "debug", "%s: Lock acquired ", __FUNCTION__);
        if (!Utopia_IGDConfigAllowed(&ctx)) {
            PAL_LOG("igd_platform", "debug", "%s: IGD config disabled in administration, return action error", __FUNCTION__);
            Utopia_Free(&ctx, 0);
            PAL_LOG("igd_platform", "debug", "%s: Lock released ", __FUNCTION__);
            return 1;
        }
        bzero(&pmap, sizeof(pmap));
        /*
         * check if entry already exist using (RemoteHost, ExternalPort, PortMappingProtocol) tuple
         */
        if (UT_SUCCESS == Utopia_FindDynPortMapping(portmapEntry->remoteHost,
                                                    portmapEntry->externalPort,
                                                    proto,
                                                    &pmap, &index)) {
            /*
             * if for same internal client, update leasetime and return success
             */
            if (0 == strcmp(portmapEntry->internalClient, pmap.internal_host)) {
                if (portmapEntry->description != NULL) {
                    /* pmap was filled by Find above (not zeroed), so copy at
                     * most sizeof-1 bytes and terminate explicitly. */
                    strncpy(pmap.name, portmapEntry->description, sizeof(pmap.name) - 1);
                    pmap.name[sizeof(pmap.name) - 1] = '\0';
                }
                pmap.lease = portmapEntry->leaseTime;
                if (( portmapEntry->internalPort == pmap.internal_port ) && ( pmap.enabled == (boolean_t) portmapEntry->enabled ))
                {
                    /* Nothing firewall-relevant changed: refresh without a
                     * firewall restart. */
                    printf("Internal port is also same, no need to restart firewall\n");
                    (void) Utopia_UpdateDynPortMapping_WithoutFirewallRestart(index, &pmap);
                }
                else
                {
                    printf("Internal port/ enabled status is different, update dyn port event. Need to restart firewall\n");
                    pmap.enabled = (boolean_t) portmapEntry->enabled;
                    pmap.internal_port = portmapEntry->internalPort;
                    (void) Utopia_UpdateDynPortMapping(index, &pmap);
                }
                rc = 0;
            } else {
                /*
                 * if for different internal client, return error
                 */
                PAL_LOG("igd_platform", "debug", "%s: entry exists for different internal client (error)", __FUNCTION__);
                rc = 1;
            }
        } else {
            /*
             * Create new entry
             * for unique ([remote-host], external-port, protocol)
             */
            pmap.enabled = (boolean_t) portmapEntry->enabled;
            /* pmap is zeroed here, so sizeof-1 copies stay NUL-terminated. */
            if (portmapEntry->description != NULL) {
                strncpy(pmap.name, portmapEntry->description, sizeof(pmap.name) - 1);
            }
            pmap.external_port = portmapEntry->externalPort;
            if (portmapEntry->remoteHost != NULL) {
                strncpy(pmap.external_host, portmapEntry->remoteHost, sizeof(pmap.external_host) - 1);
            }
            pmap.internal_port = portmapEntry->internalPort;
            if (portmapEntry->internalClient != NULL) {
                strncpy(pmap.internal_host, portmapEntry->internalClient, sizeof(pmap.internal_host) - 1);
            }
            pmap.lease = portmapEntry->leaseTime;
            pmap.protocol = proto;
            int st = Utopia_AddDynPortMapping(&pmap);
            if (UT_SUCCESS == st) {
                PAL_LOG("igd_platform", "debug", "%s: successfully added port map entry", __FUNCTION__);
                rc = 0;
            } else {
                PAL_LOG("igd_platform", "debug", "%s: Error, adding port map entry", __FUNCTION__);
                rc = 1;
            }
        }
        Utopia_Free(&ctx, 0);
        PAL_LOG("igd_platform", "debug", "%s: Lock released ", __FUNCTION__);
    }
    return rc;
}
/************************************************************
* Function: IGD_pii_del_portmapping_entry
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN(IP/PPP)connectionService.
* RemoteHost: IN. Remote host.
* ExternalPort: IN. External port.
* Protocol: IN. PortMapping protocol.
*
* Description:
* Delete a previously instantiated port mapping.
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
/* Delete a previously added dynamic port mapping, identified by the
 * (RemoteHost, ExternalPort, Protocol) tuple.
 * Returns 0 on success, 1 on failure (including utopia init failure). */
INT32 IGD_pii_del_portmapping_entry( IN INT32 WanDeviceIndex,
        IN INT32 WanConnectionDeviceIndex,
        IN INT32 WanConnectionServiceIndex,
        IN INT32 ServiceType,
        IN CHAR *RemoteHost,
        IN UINT16 ExternalPort,
        IN CHAR *Protocol
        )
{
    /* Indices/service type are unused, as in the sibling functions. */
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    (void) WanConnectionServiceIndex;
    (void) ServiceType;
    syslog_systemlog("IGD", LOG_NOTICE, "Delete Port mapping %s:%d", RemoteHost, ExternalPort);
    PAL_LOG("igd_platform", "debug", "%s: remote-host %s, ext_port %d, protocol %s ", __FUNCTION__, RemoteHost, ExternalPort, Protocol);
    UtopiaContext ctx;
    int st, rc = 1;
    if (Utopia_Init(&ctx)) {
        portMapDyn_t portmap;
        bzero(&portmap, sizeof(portmap));
        PAL_LOG("igd_platform", "debug", "%s: Lock acquired ", __FUNCTION__);
        portmap.external_port = ExternalPort;
        /* Anything other than "TCP" (case-insensitive) is treated as UDP. */
        portmap.protocol = (0 == strcasecmp(Protocol, "TCP")) ? TCP : UDP;
        if (RemoteHost) {
            /* portmap is zeroed, so a sizeof-1 copy stays NUL-terminated. */
            strncpy(portmap.external_host, RemoteHost, sizeof(portmap.external_host) - 1);
        }
        st = Utopia_DeleteDynPortMapping(&portmap);
        if (UT_SUCCESS == st) {
            PAL_LOG("igd_platform", "debug", "%s: successfully deleted port map entry", __FUNCTION__);
            rc = 0;
        } else {
            PAL_LOG("igd_platform", "debug", "%s: failed to delete port map entry", __FUNCTION__);
            rc = 1;
        }
        Utopia_Free(&ctx, 0);
        PAL_LOG("igd_platform", "debug", "%s: Lock released ", __FUNCTION__);
    }
    return rc;
}
/************************************************************
* Function: IGD_pii_get_portmapping_entry_num
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN(IP/PPP)connectionService.
* PortmappingEntryNum: OUT. The total number of the PortMapping entry .
*
* Description:
* Get the total number of the PortMapping entry.
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
/* Report the number of dynamic port-mapping entries currently stored.
 * Always returns 0; the count is 0 when the utopia query fails. */
INT32 IGD_pii_get_portmapping_entry_num(IN INT32 WanDeviceIndex,
                                        IN INT32 WanConnectionDeviceIndex,
                                        IN INT32 WanConnectionServiceIndex,
                                        IN INT32 ServiceType,
                                        OUT INT32 *PortmappingEntryNum)
{
    int count = 0;

    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    (void) WanConnectionServiceIndex;
    (void) ServiceType;
    // PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    Utopia_GetDynPortMappingCount(&count);
    // PAL_LOG("igd_platform", "debug", "%s: count = %d", __FUNCTION__, count);
    if (PortmappingEntryNum != NULL) {
        *PortmappingEntryNum = count;
    }
    /*
     * Called roughly once per-second to re-validate existing entries
     */
    /* Register an independent thread on the timer list */
    /*Utopia_InvalidateDynPortMappings();*/
    return 0;
}
/************************************************************
* Function: IGD_pii_get_portmapping_entry_generic
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN(IP/PPP)connectionService.
* PortmappingIndex: IN. The index of the portmapping entry. Value range: 0-PortmappingEntryNum
* PortmappingEntry: OUT. The portmapping entry.
*
* Description:
* Get one portmapping entry specified by the input index.
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
/* Fill PortmappingEntry with the mapping at PortmappingIndex.
 * IGD indices run 0..count-1 while the Utopia API is 1..count, hence the
 * +1 below. Returns 0 on success, 1 on any failure. */
INT32 IGD_pii_get_portmapping_entry_generic( IN INT32 WanDeviceIndex,
        IN INT32 WanConnectionDeviceIndex,
        IN INT32 WanConnectionServiceIndex,
        IN INT32 ServiceType,
        IN INT32 PortmappingIndex,
        OUT PIGD_PortMapping_Entry PortmappingEntry)
{
    printf("IGD_pii_get_portmapping_entry_generic is called.\n");
    printf("    interface: %d-%d-%d-%d\n", WanDeviceIndex, WanConnectionDeviceIndex, WanConnectionServiceIndex, ServiceType);
    printf("    PortmappingIndex: %d\n", PortmappingIndex);
    PAL_LOG("igd_platform", "debug", "%s: for index %d", __FUNCTION__, PortmappingIndex);
    /*
     * IGD array runs from 0 to PortMappingNumberOfEntries - 1
     * Utopia API entry run from 1 to PortMappingNumberOfEntries
     * .. so match up accordingly
     */
    UtopiaContext ctx;
    /* BUGFIX: was initialized to 0, so a failed Utopia_Init() reported
     * success while PortmappingEntry was left unfilled. */
    int rc = 1;
    if (Utopia_Init(&ctx)) {
        portMapDyn_t portmap;
        int count = 0, st;
        PAL_LOG("igd_platform", "debug", "%s: Lock acquired ", __FUNCTION__);
        if (UT_SUCCESS != Utopia_GetDynPortMappingCount(&count)) {
            PAL_LOG("igd_platform", "debug", "%s: Lock released 1", __FUNCTION__);
            Utopia_Free(&ctx, 0);
            return 1;
        }
        if (PortmappingIndex < 0 || PortmappingIndex >= count) {
            PAL_LOG("igd_platform", "debug", "%s: Lock released 2", __FUNCTION__);
            Utopia_Free(&ctx, 0);
            return 1;
        }
        bzero(&portmap, sizeof(portMapDyn_t));
        if (UT_SUCCESS != (st = Utopia_GetDynPortMapping(PortmappingIndex+1, &portmap))) {
            PAL_LOG("igd_platform", "debug", "%s: Utopia_GetDynPortMapping failed (rc=%d)", __FUNCTION__, st);
            PAL_LOG("igd_platform", "debug", "%s: Lock released 3", __FUNCTION__);
            Utopia_Free(&ctx, 0);
            return 1;
        }
        PortmappingEntry->enabled = portmap.enabled;
        /* PortmappingEntry is a caller buffer that is not pre-zeroed, so
         * every strncpy() must be explicitly NUL-terminated. */
        strncpy(PortmappingEntry->description, portmap.name, PORT_MAP_DESCRIPTION_LEN - 1);
        PortmappingEntry->description[PORT_MAP_DESCRIPTION_LEN - 1] = '\0';
        PortmappingEntry->leaseTime = portmap.lease;
        if (portmap.protocol == TCP) {
            strcpy(PortmappingEntry->protocol, "TCP");
        } else {
            strcpy(PortmappingEntry->protocol, "UDP");
        }
        PortmappingEntry->externalPort = portmap.external_port;
        strcpy(PortmappingEntry->remoteHost, portmap.external_host);
        PortmappingEntry->internalPort = portmap.internal_port;
        /* internalClient is smaller than internal_host, avoid string overrun. */
        strncpy(PortmappingEntry->internalClient, portmap.internal_host, IPV4_ADDR_LEN-1);
        PortmappingEntry->internalClient[IPV4_ADDR_LEN - 1] = '\0';
        rc = 0;
        PAL_LOG("igd_platform", "debug", "%s: Lock released ", __FUNCTION__);
        Utopia_Free(&ctx, 0);
    }
    return rc;
}
/************************************************************
* Function: IGD_pii_get_portmapping_entry_specific
*
* Parameters:
* WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
* WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice..
* WanConnectionServiceIndex: IN. Index of WAN(IP/PPP)ConnectionService,range:1-Number of WAN(IP/PPP)ConnectionService
* ServiceType: IN. Type of WAN(IP/PPP)connectionService.
* PortmappingEntry: INOUT. The portmapping entry.
*
* Description:
* Get one portmapping entry specified by the unique tuple of
* RemoteHost,ExteralPort and Protocol in the input parameter,PortmappingEntry
*
* Related UPnP Device/Service: WAN(IP/PPP)ConnectionService
*
* Return Values: INT32
* 0 if successful else error code.
************************************************************/
/* Look up one mapping by the (RemoteHost, ExternalPort, Protocol) tuple
 * already filled in PortmappingEntry, and complete the remaining fields.
 * Returns 0 on success, 1 when not found or utopia access fails. */
INT32 IGD_pii_get_portmapping_entry_specific( IN INT32 WanDeviceIndex,
        IN INT32 WanConnectionDeviceIndex,
        IN INT32 WanConnectionServiceIndex,
        IN INT32 ServiceType,
        INOUT PIGD_PortMapping_Entry PortmappingEntry)
{
    printf("IGD_pii_get_portmapping_entry_specific is called.\n");
    printf("    interface: %d-%d-%d-%d\n", WanDeviceIndex, WanConnectionDeviceIndex, WanConnectionServiceIndex, ServiceType);
    printf("    Remote Host: %s\n", PortmappingEntry->remoteHost);
    printf("    External Port: %d\n", PortmappingEntry->externalPort);
    printf("    PortMapping Protocol: %s\n", PortmappingEntry->protocol);
    PAL_LOG("igd_platform", "debug", "%s: Remote Host: %s, External Port: %d, PortMapping Protocol: %s\n", __FUNCTION__, PortmappingEntry->remoteHost, PortmappingEntry->externalPort, PortmappingEntry->protocol);
    /*
     * IGD array runs from 0 to PortMappingNumberOfEntries - 1
     * syscfg entry run from 1 to PortMappingNumberOfEntries
     * .. so match up accordingly
     *
     */
    UtopiaContext ctx;
    int rc = 1;
    if (Utopia_Init(&ctx)) {
        int index;
        portMapDyn_t pmap;
        PAL_LOG("igd_platform", "debug", "%s: Lock acquired ", __FUNCTION__);
        bzero(&pmap, sizeof(pmap));
        /*
         * check for entry using (RemoteHost, ExternalPort, PortMappingProtocol) tuple
         */
        protocol_t proto = (0 == strcasecmp(PortmappingEntry->protocol, "TCP")) ? TCP : UDP;
        if (UT_SUCCESS == Utopia_FindDynPortMapping(PortmappingEntry->remoteHost,
                                                    PortmappingEntry->externalPort,
                                                    proto,
                                                    &pmap, &index)) {
            PortmappingEntry->enabled = pmap.enabled;
            /* PortmappingEntry is a caller buffer that is not pre-zeroed, so
             * every strncpy() must be explicitly NUL-terminated. */
            strncpy(PortmappingEntry->description, pmap.name, PORT_MAP_DESCRIPTION_LEN - 1);
            PortmappingEntry->description[PORT_MAP_DESCRIPTION_LEN - 1] = '\0';
            PortmappingEntry->leaseTime = pmap.lease;
            PortmappingEntry->internalPort = pmap.internal_port;
            /* internalClient is smaller than internal_host, avoid string overrun. */
            strncpy(PortmappingEntry->internalClient, pmap.internal_host, IPV4_ADDR_LEN-1);
            PortmappingEntry->internalClient[IPV4_ADDR_LEN - 1] = '\0';
            rc = 0;
        } else {
            PAL_LOG("igd_platform", "debug", "%s: couldn't find entry", __FUNCTION__);
            rc = 1;
        }
        Utopia_Free(&ctx, 0);
        PAL_LOG("igd_platform", "debug", "%s: Lock released ", __FUNCTION__);
    }
    return rc;
}
/************************************************************
 * Function: IGD_pii_get_ethernet_link_status
 *
 * Parameters:
 *     WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
 *     WanConnectionDeviceIndex: IN. Index of WANConnectionDevice, range:1-Number of WANConnectionDevice.
 *     EthernetLinkStatus: OUT. The status of the WAN Ethernet link.
 *         Buffer is assumed to be at least 16 bytes (matches the
 *         original strncpy bound) -- TODO confirm against callers.
 *
 * Description:
 *     Get the link status of the Ethernet connection specified by the input device index
 *     Related UPnP Device/Service: WANEthernetLinkConfigService
 *
 * Return Values: INT32
 *     0 if successful else error code.
 ************************************************************/
INT32 IGD_pii_get_ethernet_link_status(IN INT32 WanDeviceIndex,
                                       IN INT32 WanConnectionDeviceIndex,
                                       OUT CHAR *EthernetLinkStatus)
{
    (void) WanDeviceIndex;
    (void) WanConnectionDeviceIndex;
    // PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    wanConnectionStatus_t wan;
    bzero(&wan, sizeof(wanConnectionStatus_t));
    UtopiaContext ctx;
    if (!Utopia_Init(&ctx)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting utctx object", __FUNCTION__);
        return 1;
    }
    if (SUCCESS != Utopia_GetWANConnectionStatus(&ctx, &wan)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting wan connection info", __FUNCTION__);
        Utopia_Free(&ctx, 0);
        return 1;
    }
    Utopia_Free(&ctx, 0);
    if (0 != wan.phylink_up) {
        strncpy(EthernetLinkStatus, ETHERNETLINKSTATUS_UP, 16);
    } else {
        strncpy(EthernetLinkStatus, ETHERNETLINKSTATUS_DOWN, 16);
    }
    /* strncpy() leaves the buffer unterminated when the source is >= 16
     * chars; force termination within the 16 bytes already written. */
    EthernetLinkStatus[15] = '\0';
    return 0;
}
/************************************************************
 * Function: IGD_pii_get_common_link_properties
 *
 * Parameters:
 *     WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
 *     WanAccessType: OUT. The type of the WAN access.
 *     Layer1UpstreamMaxBitRate: OUT. The MAX upstream theoretical bit rate(in bit/s) for the WAN device.
 *     Layer1DownstreamMaxBitRate: OUT. The MAX downstream theoretical bit rate(in bit/s) for the WAN device.
 *     PhyscialLinkStatus: OUT. The state of the physical connection(link) from WANDevice to a connected entity.
 *     All OUT buffers are assumed to be at least 16 bytes (matches the
 *     original strncpy bound) -- TODO confirm against callers.
 *
 * Description:
 *     Get the common link properties of the WAN device specified by the input device index.
 *     Related UPnP Device/Service: WANCommonInterfaceConfigService
 *
 * Return Values: INT32
 *     0 if successful else error code.
 ************************************************************/
INT32 IGD_pii_get_common_link_properties(IN INT32 WanDeviceIndex,
                                         OUT CHAR *WanAccessType,
                                         OUT CHAR *Layer1UpstreamMaxBitRate,
                                         OUT CHAR *Layer1DownstreamMaxBitRate,
                                         OUT CHAR *PhyscialLinkStatus)
{
    (void) WanDeviceIndex;
    // PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    wanConnectionStatus_t wan;
    bzero(&wan, sizeof(wanConnectionStatus_t));
    UtopiaContext ctx;
    if (!Utopia_Init(&ctx)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting utctx object", __FUNCTION__);
        return 1;
    }
    if (SUCCESS != Utopia_GetWANConnectionStatus(&ctx, &wan)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting wan connection info", __FUNCTION__);
        Utopia_Free(&ctx, 0);
        return 1;
    }
    Utopia_Free(&ctx, 0);
    /* strncpy() leaves the buffer unterminated when the source is >= 16
     * chars; force termination within the 16 bytes already written.
     * (The fixed bit-rate literals are 9 chars, so they are always
     * terminated by strncpy's zero padding.) */
    strncpy(WanAccessType, WANACCESSTYPE_ETHERNET, 16);
    WanAccessType[15] = '\0';
    strncpy(Layer1UpstreamMaxBitRate, "100000000", 16);
    strncpy(Layer1DownstreamMaxBitRate, "100000000", 16);
    if (0 != wan.phylink_up) {
        strncpy(PhyscialLinkStatus, LINKSTATUS_UP, 16);
    } else {
        strncpy(PhyscialLinkStatus, LINKSTATUS_DOWN, 16);
    }
    PhyscialLinkStatus[15] = '\0';
    return 0;
}
/************************************************************
 * Function: IGD_pii_get_traffic_stats
 *
 * Parameters:
 *     WanDeviceIndex: IN. Index of WANDevice, range:1-Number of WANDevice.
 *     bufsz: IN. size of output buffer (same for all four params)
 *     TotalBytesSent: OUT. Total bytes sent on the WAN device.
 *     TotalBytesReceived: OUT. Total bytes received on the WAN device.
 *     TotalPacketsSent: OUT. Total packets sent on the WAN device.
 *     TotalPacketsReceived: OUT. Total packets received on the WAN device.
 *
 * Description:
 *     Get the traffic statistics of the WAN device specified by the input device index.
 *     Related UPnP Device/Service: WANCommonInterfaceConfigService
 *
 * Return Values: INT32
 *     0 if successful else error code.
 ************************************************************/
INT32 IGD_pii_get_traffic_stats(IN INT32 WanDeviceIndex,
                                IN INT32 bufsz,
                                OUT CHAR *TotalBytesSent,
                                OUT CHAR *TotalBytesReceived,
                                OUT CHAR *TotalPacketsSent,
                                OUT CHAR *TotalPacketsReceived)
{
    (void) WanDeviceIndex;
    PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    wanTrafficInfo_t wan;
    bzero(&wan, sizeof(wanTrafficInfo_t));
    if (UT_SUCCESS != Utopia_GetWANTrafficInfo(&wan)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting wan traffic statistics", __FUNCTION__);
        return 1;
    }
    /* Use %u to match the unsigned cast: %d would render counters above
     * INT_MAX as negative numbers. NOTE(review): the 32-bit cast still
     * truncates counters wider than unsigned int -- confirm the type of
     * the wanTrafficInfo_t fields. */
    if (TotalBytesSent) {
        snprintf(TotalBytesSent, bufsz, "%u", (unsigned int) wan.bytes_sent);
    }
    if (TotalBytesReceived) {
        snprintf(TotalBytesReceived, bufsz, "%u", (unsigned int) wan.bytes_rcvd);
    }
    if (TotalPacketsSent) {
        snprintf(TotalPacketsSent, bufsz, "%u", (unsigned int) wan.pkts_sent);
    }
    if (TotalPacketsReceived) {
        snprintf(TotalPacketsReceived, bufsz, "%u", (unsigned int) wan.pkts_rcvd);
    }
    return 0;
}
/************************************************************
 * Function: IGD_pii_get_lan_dhcpserver_configurable
 * Parameters:
 *     LanDeviceIndex: IN. Index of LANDevice, range:1-Number of LANDevice.
 *     status: OUT. "0" = not configurable (always).
 *
 * Description:
 *     Allowing the DHCP server to be reconfigured over UPnP IGD would be
 *     a security violation: the DHCP-server set methods carry no
 *     authentication. Therefore this always reports NOT configurable.
 *
 * Return Values: INT32
 *     0 if successful else error code.
 ************************************************************/
INT32 IGD_pii_get_lan_dhcpserver_configurable(IN INT32 LanDeviceIndex, OUT CHAR *status)
{
    (void) LanDeviceIndex;
    PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    /* Hard-coded "0": DHCP server is never UPnP-configurable. */
    status[0] = '0';
    status[1] = '\0';
    return 0;
}
/************************************************************
 * Function: IGD_pii_get_lan_dhcp_relay_status
 * Parameters:
 *     LanDeviceIndex: IN. Index of LANDevice, range:1-Number of LANDevice.
 *     status: OUT. "1" when in bridge mode, "0" in router mode.
 *
 * Description:
 *     Intended to report bridge mode ("1") vs router mode ("0");
 *     currently hard-wired to router mode. To be enhanced as part of
 *     the LAN Auto-Bridging feature.
 *
 * Return Values: INT32
 *     0 if successful else error code.
 ************************************************************/
INT32 IGD_pii_get_lan_dhcp_relay_status(IN INT32 LanDeviceIndex, OUT CHAR *status)
{
    (void) LanDeviceIndex;
    PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    /* Placeholder: always router mode until auto-bridging lands. */
    status[0] = '0';
    status[1] = '\0';
    return 0;
}
/************************************************************
 * Function: IGD_pii_get_lan_info
 * Parameters:
 *     LanDeviceIndex: IN. Index of LANDevice, range:1-Number of LANDevice.
 *     bufsz: IN. buffer size of OUT params (they all need to be same size)
 *     ipaddr: OUT. IP address of the LAN device
 *     subnet_mask: OUT. subnet mask address of the device
 *     domain_name: OUT. domain name of the device
 *     Each OUT pointer may be NULL if the caller does not want that value.
 *
 * Description:
 *     Returns various LAN Device settings
 *
 * Return Values: INT32
 *     0 if successful else error code.
 ************************************************************/
INT32 IGD_pii_get_lan_info(IN INT32 LanDeviceIndex, IN INT32 bufsz, OUT CHAR *ipaddr, OUT CHAR *subnet_mask, OUT CHAR *domain_name)
{
    (void) LanDeviceIndex;
    PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    /* A non-positive buffer size cannot hold even the NUL terminator. */
    if (bufsz <= 0) {
        return 1;
    }
    lanSetting_t lan;
    bzero(&lan, sizeof(lanSetting_t));
    UtopiaContext ctx;
    if (!Utopia_Init(&ctx)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting utctx object", __FUNCTION__);
        return 1;
    }
    if (SUCCESS != Utopia_GetLanSettings(&ctx, &lan)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting lan settings info", __FUNCTION__);
        Utopia_Free(&ctx, 0);
        return 1;
    }
    Utopia_Free(&ctx, 0);
    /* strncpy() does not NUL-terminate when the source fills the buffer;
     * copy at most bufsz-1 chars and terminate explicitly. */
    if (ipaddr) {
        strncpy(ipaddr, lan.ipaddr, bufsz - 1);
        ipaddr[bufsz - 1] = '\0';
    }
    if (subnet_mask) {
        strncpy(subnet_mask, lan.netmask, bufsz - 1);
        subnet_mask[bufsz - 1] = '\0';
    }
    if (domain_name) {
        strncpy(domain_name, lan.domain, bufsz - 1);
        domain_name[bufsz - 1] = '\0';
    }
    return 0;
}
/************************************************************
 * Function: IGD_pii_get_lan_dns_servers
 * Parameters:
 *     LanDeviceIndex: IN. Index of LANDevice, range:1-Number of LANDevice.
 *     dns_servers: OUT. comma separated list of dns servers (may be NULL)
 *     max_list_sz: IN. size of the dns_servers buffer
 *
 * Description:
 *     Returns the LAN DNS servers.
 *     Currently the system uses the router's DNS proxy as the LAN's DNS
 *     server, so just return the LAN default gw address as the DNS server
 *     address.
 *
 * Return Values: INT32
 *     0 if successful else error code.
 ************************************************************/
INT32 IGD_pii_get_lan_dns_servers(IN INT32 LanDeviceIndex, OUT CHAR *dns_servers, IN INT32 max_list_sz)
{
    (void) LanDeviceIndex;
    PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    /* A non-positive buffer size cannot hold even the NUL terminator. */
    if (max_list_sz <= 0) {
        return 1;
    }
    lanSetting_t lan;
    bzero(&lan, sizeof(lanSetting_t));
    UtopiaContext ctx;
    if (!Utopia_Init(&ctx)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting utctx object", __FUNCTION__);
        return 1;
    }
    if (SUCCESS != Utopia_GetLanSettings(&ctx, &lan)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting lan settings info", __FUNCTION__);
        Utopia_Free(&ctx, 0);
        return 1;
    }
    Utopia_Free(&ctx, 0);
    if (dns_servers) {
        /* strncpy() does not NUL-terminate when the source fills the
         * buffer; copy at most max_list_sz-1 chars and terminate. */
        strncpy(dns_servers, lan.ipaddr, max_list_sz - 1);
        dns_servers[max_list_sz - 1] = '\0';
    }
    return 0;
}
/************************************************************
 * Function: IGD_pii_get_lan_addr_range
 * Parameters:
 *     LanDeviceIndex: IN. Index of LANDevice, range:1-Number of LANDevice.
 *     buf_sz: IN. buffer size of OUT params (they all need to be same size)
 *     min_address: OUT. start address of the range
 *     max_address: OUT. end address of the range
 *
 * Description:
 *     Returns the LAN DHCP server's DHCP address range. The range is
 *     derived from the first three octets of the LAN IP address plus the
 *     configured DHCP start octet and max-users count.
 *
 * Return Values: INT32
 *     0 if successful else error code.
 ************************************************************/
INT32 IGD_pii_get_lan_addr_range(IN INT32 LanDeviceIndex, IN INT32 buf_sz, OUT CHAR *min_address, OUT CHAR *max_address)
{
    (void) LanDeviceIndex;
    PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    dhcpServerInfo_t dhcps;
    lanSetting_t lan;
    bzero(&lan, sizeof(lanSetting_t));
    bzero(&dhcps, sizeof(dhcpServerInfo_t));
    UtopiaContext ctx;
    if (!Utopia_Init(&ctx)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting utctx object", __FUNCTION__);
        return 1;
    }
    if (SUCCESS != Utopia_GetDHCPServerSettings(&ctx, &dhcps)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting lan settings info", __FUNCTION__);
        Utopia_Free(&ctx, 0);
        return 1;
    }
    if (SUCCESS != Utopia_GetLanSettings(&ctx, &lan)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting lan settings info", __FUNCTION__);
        Utopia_Free(&ctx, 0);
        return 1;
    }
    Utopia_Free(&ctx, 0);
    PAL_LOG("igd_platform", "debug", "%s: lan.ipaddr %s", __FUNCTION__, lan.ipaddr);
    int octet1, octet2, octet3, last_octet;
    int ct = sscanf(lan.ipaddr, "%d.%d.%d.%d", &octet1, &octet2, &octet3, &last_octet);
    PAL_LOG("igd_platform", "debug", "%s: p [%s], sscanf ct %d", __FUNCTION__, lan.ipaddr, ct);
    if (4 != ct) {
        /* Previously this path returned success while leaving the OUT
         * buffers untouched, handing the caller uninitialized memory.
         * Report failure instead. */
        PAL_LOG("igd_platform", "debug", "%s: Error, couldn't parse lan ipaddr", __FUNCTION__);
        return 1;
    }
    /* Range start: LAN /24 prefix + configured start octet (a string). */
    snprintf(min_address, buf_sz, "%d.%d.%d.%s", octet1, octet2, octet3, dhcps.DHCPIPAddressStart);
    /* Range end: start octet + max users - 1 (inclusive). */
    int end_ip_octet = atoi(dhcps.DHCPIPAddressStart) + dhcps.DHCPMaxUsers - 1;
    snprintf(max_address, buf_sz, "%d.%d.%d.%d", octet1, octet2, octet3, end_ip_octet);
    return 0;
}
/************************************************************
 * Function: IGD_pii_get_lan_reserved_addr_list
 * Parameters:
 *     LanDeviceIndex: IN. Index of LANDevice, range:1-Number of LANDevice.
 *     reserved_list: OUT. comma separated list of reserved DHCP addresses
 *     max_list_sz: IN. size of the reserved_list buffer
 *
 * Description:
 *     Returns the LAN DHCP server's reserved (static-host) DHCP
 *     addresses as "a.b.c.d,a.b.c.e,...". If the list does not fit in
 *     max_list_sz, as many whole entries as fit are returned.
 *
 * Return Values: INT32
 *     0 if successful else error code.
 ************************************************************/
INT32 IGD_pii_get_lan_reserved_addr_list(IN INT32 LanDeviceIndex, OUT CHAR *reserved_list, IN INT32 max_list_sz)
{
    (void) LanDeviceIndex;
    PAL_LOG("igd_platform", "debug", "%s: Enter ", __FUNCTION__);
    if (NULL == reserved_list || max_list_sz <= 0) {
        return 1;
    }
    /* Start from an empty string so the output is well-defined even when
     * no static hosts exist (the old code strncat'ed onto whatever the
     * caller's buffer happened to contain). */
    reserved_list[0] = '\0';
    DHCPMap_t *dhcp_static_hosts = NULL;
    int dhcp_static_hosts_count = 0;
    lanSetting_t lan;
    bzero(&lan, sizeof(lanSetting_t));
    UtopiaContext ctx;
    if (!Utopia_Init(&ctx)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting utctx object", __FUNCTION__);
        return 1;
    }
    if (SUCCESS != Utopia_GetLanSettings(&ctx, &lan)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting lan settings info", __FUNCTION__);
        Utopia_Free(&ctx, 0);
        return 1;
    }
    if (SUCCESS != Utopia_GetDHCPServerStaticHosts(&ctx, &dhcp_static_hosts_count, &dhcp_static_hosts)) {
        PAL_LOG("igd_platform", "debug", "%s: Error, in getting lan settings info", __FUNCTION__);
        Utopia_Free(&ctx, 0);
        return 1;
    }
    Utopia_Free(&ctx, 0);
    PAL_LOG("igd_platform", "debug", "%s: ipaddr [%s], host ct [%d]", __FUNCTION__, lan.ipaddr, dhcp_static_hosts_count);
    int octet1, octet2, octet3, last_octet;
    int i, ct;
    ct = sscanf(lan.ipaddr, "%d.%d.%d.%d", &octet1, &octet2, &octet3, &last_octet);
    if (4 == ct) {
        int used = 0;
        for (i = 0; i < dhcp_static_hosts_count; i++) {
            PAL_LOG("igd_platform", "debug", "%s: index [%d], name [%s], host_ip [%d], mac [%s]", __FUNCTION__, i, dhcp_static_hosts[i].client_name, dhcp_static_hosts[i].host_ip, dhcp_static_hosts[i].macaddr);
            /* Bug fixes vs the original:
             *  - the inverted `first` flag prepended a comma before EVERY
             *    entry (including the first) and was never cleared; the
             *    separator now goes only BETWEEN entries;
             *  - strncat()'s third argument bounds the chars APPENDED,
             *    not the total buffer size, so the old code could
             *    overflow; snprintf with the remaining space cannot. */
            int written = snprintf(reserved_list + used, max_list_sz - used, "%s%d.%d.%d.%d",
                                   (i > 0) ? "," : "",
                                   octet1, octet2, octet3, dhcp_static_hosts[i].host_ip);
            if (written < 0 || written >= max_list_sz - used) {
                /* Buffer full: keep the entries that fit, drop the rest. */
                reserved_list[used] = '\0';
                break;
            }
            used += written;
        }
    }
    if (dhcp_static_hosts) {
        free(dhcp_static_hosts);
    }
    return 0;
}
|
marstona/ovh-java-sdk
|
ovh-java-sdk-cloud/src/main/java/net/minidev/ovh/api/cloud/OvhRegionContinent.java
|
<reponame>marstona/ovh-java-sdk
package net.minidev.ovh.api.cloud;
/**
 * Possible values for a cloud region's ContinentCode.
 */
public enum OvhRegionContinent {
	EU("EU"),
	NA("NA"),
	US("US"),
	ASIA("ASIA");

	final String value;

	OvhRegionContinent(String code) {
		this.value = code;
	}

	@Override
	public String toString() {
		return value;
	}
}
|
gigmap/back-end
|
test/unit/rest/middleware/makeResponse.test.js
|
const {expect} = require('chai');
const {describe, it} = require('mocha');
const ErrorMessage = require('../../../../src/rest/common/ErrorMessage');
const makeResponse = require('../../../../src/rest/middleware/makeResponse');
const formatError = require('../../../lib/formatResponseError');
describe('makeResponse', function () {
	// Both message forms must normalize to the same {status, body} shape.
	const expectedFor = (code) => ({status: 200, body: formatError(code)});

	it('should make response with string message', function () {
		expect(makeResponse(200, 'CODE')).to.be.deep.equal(expectedFor('CODE'));
	});

	it('should make response with ErrorMessage', function () {
		expect(makeResponse(200, new ErrorMessage().add('CODE'))).to.be.deep.equal(expectedFor('CODE'));
	});
});
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.