Skip to content

Commit 1ecf310

Browse files
committed
Define new type for variables representing numbers of samples
1 parent dca8d93 commit 1ecf310

3 files changed

Lines changed: 31 additions & 24 deletions

File tree

include/plugin.h

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
#define GSF_PLUGIN_H 1
33

44
#include "deadbeef/deadbeef.h"
5+
#include "types.h"
56

67
#ifdef __cplusplus
78
extern "C" {
@@ -26,7 +27,7 @@ struct PluginState {
2627
~PluginState();
2728

2829
DB_fileinfo_t fFileInfo;
29-
long readsample;
30+
sample_t readsample;
3031
bool fInit;
3132
uint32_t hints;
3233
TrackMetadata fMetadata;

include/types.h

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
#ifndef GSF_TYPES_H
#define GSF_TYPES_H 1

/* A count or index of audio samples (also used for sample rates in Hz).
 * Signed so that differences (e.g. remaining = total - read) are
 * well-defined even when they would momentarily go negative. */
typedef long sample_t;

#endif

src/play.cpp

Lines changed: 23 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -22,33 +22,33 @@ inline PluginState *get_plugin_state(DB_fileinfo_t *_info) {
2222
return (PluginState*)_info;
2323
}
2424

// Convert a duration in milliseconds to a sample count at `sample_rate` Hz.
// The double result is truncated toward zero by the conversion to sample_t.
inline constexpr sample_t length_to_samples(const double length_ms, const sample_t sample_rate) {
    return sample_rate * length_ms / 1000;
}
2828

// Full track length in samples: audible portion plus fadeout tail.
// `meta.Length` and `meta.Fadeout` are in milliseconds (see length_to_samples).
inline constexpr sample_t total_length_samples(const TrackMetadata &meta, const sample_t sample_rate) {
    return length_to_samples(meta.Length, sample_rate) + length_to_samples(meta.Fadeout, sample_rate);
}
3232

33-
inline constexpr long total_length_seconds(const TrackMetadata &meta) {
33+
inline constexpr sample_t total_length_seconds(const TrackMetadata &meta) {
3434
return (float)(meta.Length + meta.Fadeout) / 1000.0;
3535
}
3636

37-
inline constexpr int16_t linear_fade(const int16_t sample, const long sample_n, const long fadeout_start, const long fadeout_samples) {
37+
inline constexpr int16_t linear_fade(const int16_t sample, const sample_t sample_n, const sample_t fadeout_start, const sample_t fadeout_samples) {
3838
if (sample_n < fadeout_start)
3939
return sample;
4040

4141
// don't worry about x > fadeout_samples; this should never happen
4242
// as any earlier checks will end the track before then
43-
const long x = sample_n - fadeout_start;
43+
const sample_t x = sample_n - fadeout_start;
4444
const double m = 1.0 / (double)(fadeout_samples);
4545
double factor = 1 - m*x;
4646
return factor * sample;
4747
}
4848

4949
// used for determining a factor that reduces the signal to
5050
// A*lower_threshold after fadeout_samples
51-
inline const double log_fade_factor(const long fadeout_samples, const double lower_threshold) {
51+
inline const double log_fade_factor(const sample_t fadeout_samples, const double lower_threshold) {
5252
// s'(n) = f**n * s(n)
5353
// want f such that for n=fadeout_samples, f**n = lower_threshold
5454
// f = lower_threshold**(1/n)
@@ -57,44 +57,44 @@ inline const double log_fade_factor(const long fadeout_samples, const double low
5757

5858
// used for determining a factor that reduces the signal to A*factor
5959
// after fadeout_samples/2
60-
inline const double log_fade_half_factor(const long fadeout_samples, const double factor) {
60+
inline const double log_fade_half_factor(const sample_t fadeout_samples, const double factor) {
6161
// s'(n) = f**n * s(n)
6262
// want f such that for n=fadeout_samples/2, f**n = factor
6363
// f = factor**(2/fadeout_samples)
6464
return pow(factor, (double)2.0/(double)fadeout_samples);
6565
}
6666

67-
inline int16_t log_fade(const int16_t sample, const long sample_n,
68-
const long fadeout_start,
67+
inline int16_t log_fade(const int16_t sample, const sample_t sample_n,
68+
const sample_t fadeout_start,
6969
const double fadeout_factor) {
7070
if (sample_n < fadeout_start)
7171
return sample;
7272

73-
const double n = sample_n - fadeout_start;
74-
const double f = pow(fadeout_factor, n);
73+
const sample_t n = sample_n - fadeout_start;
74+
const double f = pow(fadeout_factor, (double)n);
7575
return f * sample;
7676
}
7777

7878
inline size_t adjust_track_end(DB_functions_t *deadbeef, size_t to_copy, PluginState *state) {
79-
const long sample_rate = state->fFileInfo.fmt.samplerate;
79+
const sample_t sample_rate = state->fFileInfo.fmt.samplerate;
8080
// if we would copy more samples than the length of the file, we
8181
// need to trim the buffer, but ONLY if we aren't looping!
8282
const bool should_loop = (deadbeef->streamer_get_repeat () == DDB_REPEAT_SINGLE) && (state->hints & DDB_DECODER_HINT_CAN_LOOP);
8383
const bool use_log_fade = state->use_log_fade;
8484
if (!should_loop) {
85-
const long total_samples = total_length_samples(state->fMetadata, sample_rate);
86-
const long length_samples = length_to_samples(state->fMetadata.Length, sample_rate);
87-
const long fadeout_samples = length_to_samples(state->fMetadata.Fadeout, sample_rate);
85+
const sample_t total_samples = total_length_samples(state->fMetadata, sample_rate);
86+
const sample_t length_samples = length_to_samples(state->fMetadata.Length, sample_rate);
87+
const sample_t fadeout_samples = length_to_samples(state->fMetadata.Fadeout, sample_rate);
8888

89-
const long readsample = state->readsample;
90-
const long remaining_samples = total_samples - readsample;
89+
const sample_t readsample = state->readsample;
90+
const sample_t remaining_samples = total_samples - readsample;
9191
// one sample is 4 bytes (16-bit per channel, 2 channels), so we
9292
// must convert remaining_samples into bytes and use this value.
9393
const size_t remaining_bytes = remaining_samples * 4;
9494
if (to_copy > remaining_bytes)
9595
to_copy = remaining_bytes;
9696

97-
const long fadeout_start = length_samples;
97+
const sample_t fadeout_start = length_samples;
9898
// each sample is 4 bytes with 2 bytes per channel
9999
// fadeout must be applied to each channel separately
100100
int16_t* channel_samples = (int16_t*)state->output.sample_buffer.data();
@@ -154,7 +154,7 @@ int gsf_init(DB_fileinfo_t *info, DB_playItem_t *it) {
154154
}
155155
#endif
156156

157-
long config_sample_rate = deadbeef->conf_get_int64 ("gsf.samplerate", DEFAULT_SAMPLE_RATE);
157+
sample_t config_sample_rate = deadbeef->conf_get_int64 ("gsf.samplerate", DEFAULT_SAMPLE_RATE);
158158
state->use_log_fade = deadbeef->conf_get_int("gsf.log_fade", 1);
159159

160160
info->fmt.bps = 16;
@@ -233,7 +233,7 @@ int gsf_read(DB_fileinfo_t *_info, char *buffer, int nbytes) {
233233
auto deadbeef = get_API_pointer();
234234
auto plugin = get_plugin_pointer();
235235
PluginState *state = get_plugin_state(_info);
236-
const long sample_rate = _info->fmt.samplerate;
236+
const sample_t sample_rate = _info->fmt.samplerate;
237237

238238
if (!state->fInit) {
239239
trace("GSF ERR: attempt to read from uninitialised plugin state\n");
@@ -315,7 +315,7 @@ int gsf_read(DB_fileinfo_t *_info, char *buffer, int nbytes) {
315315
}
316316

317317
// Seek to an absolute position given in seconds by converting to a sample
// index and delegating to gsf_seek_sample.
int gsf_seek(DB_fileinfo_t *info, float seconds) {
    // Seconds → sample index (truncated toward zero).
    sample_t sample = seconds * info->fmt.samplerate;
    // NOTE(review): gsf_seek_sample takes an `int` sample index, so this
    // sample_t (long) narrows at the call — confirm track lengths keep the
    // index within int range.
    return gsf_seek_sample(info, sample);
}
321321

@@ -331,7 +331,7 @@ int gsf_seek_sample(DB_fileinfo_t *info, int sample) {
331331
state->readsample = 0;
332332
}
333333

334-
long to_seek = sample - state->readsample;
334+
sample_t to_seek = sample - state->readsample;
335335
size_t &in_buffer = state->output.bytes_in_buffer;
336336
while (to_seek > 0) {
337337
#ifdef BUILD_DEBUG
@@ -343,7 +343,7 @@ int gsf_seek_sample(DB_fileinfo_t *info, int sample) {
343343
#endif
344344
#endif
345345
if (in_buffer > 0) {
346-
long samples_in_buffer = in_buffer / 4;
346+
sample_t samples_in_buffer = in_buffer / 4;
347347
if (samples_in_buffer <= to_seek) {
348348
#ifdef BUILD_DEBUG
349349
tracedbg("GSF DEBUG: Discarding buffer\n");

0 commit comments

Comments
 (0)