Searched refs: track (Results 1 – 25 of 490), sorted by relevance

/external/libmtp/examples/
tracks.c
26 static void dump_trackinfo(LIBMTP_track_t *track) in dump_trackinfo() argument
28 printf("Track ID: %u\n", track->item_id); in dump_trackinfo()
29 if (track->title != NULL) in dump_trackinfo()
30 printf(" Title: %s\n", track->title); in dump_trackinfo()
31 if (track->artist != NULL) in dump_trackinfo()
32 printf(" Artist: %s\n", track->artist); in dump_trackinfo()
33 if (track->genre != NULL) in dump_trackinfo()
34 printf(" Genre: %s\n", track->genre); in dump_trackinfo()
35 if (track->composer != NULL) in dump_trackinfo()
36 printf(" Composer: %s\n", track->composer); in dump_trackinfo()
[all …]
getplaylist.c
33 LIBMTP_track_t *track; in dump_playlist() local
35 track = LIBMTP_Get_Trackmetadata(device, pl->tracks[i]); in dump_playlist()
36 if (track != NULL) { in dump_playlist()
37 printf(" %u: %s - %s\n", pl->tracks[i], track->artist, track->title); in dump_playlist()
38 LIBMTP_destroy_track_t(track); in dump_playlist()
/external/libgdx/backends/gdx-backend-moe/src/com/badlogic/gdx/backends/iosmoe/
IOSMusic.java
27 private final OALAudioTrack track; field in IOSMusic
30 public IOSMusic (OALAudioTrack track) { in IOSMusic() argument
31 this.track = track; in IOSMusic()
46 this.track.setDelegate(delegate); in IOSMusic()
51 if (track.paused()) { in play()
52 track.setPaused(false); in play()
53 } else if (!track.playing()) { in play()
54 track.play(); in play()
60 if (track.playing()) { in pause()
61 track.setPaused(true); in pause()
[all …]
/external/libgdx/backends/gdx-backend-robovm/src/com/badlogic/gdx/backends/iosrobovm/
IOSMusic.java
28 private final OALAudioTrack track; field in IOSMusic
31 public IOSMusic (OALAudioTrack track) { in IOSMusic() argument
32 this.track = track; in IOSMusic()
33 this.track.setDelegate(new AVAudioPlayerDelegateAdapter() { in IOSMusic()
51 if (track.isPaused()) { in play()
52 track.setPaused(false); in play()
53 } else if (!track.isPlaying()) { in play()
54 track.play(); in play()
60 if (track.isPlaying()) { in pause()
61 track.setPaused(true); in pause()
[all …]
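
Both IOSMusic backends above (MOE and RoboVM) implement libGDX's cross-platform Music interface on top of an OALAudioTrack, so game code never touches the track directly. A minimal usage sketch against that public interface, assuming it runs inside a libGDX ApplicationListener; the asset path is a placeholder:

import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.audio.Music;

public class MusicExample {
    private Music music;

    public void create() {
        // newMusic() returns the backend-specific implementation,
        // e.g. IOSMusic wrapping an OALAudioTrack on iOS.
        music = Gdx.audio.newMusic(Gdx.files.internal("music/theme.mp3")); // placeholder asset
        music.setLooping(true);
        music.play();                 // resumes if paused, starts otherwise (cf. IOSMusic.play())
    }

    public void pause() {
        if (music.isPlaying()) {
            music.pause();            // cf. IOSMusic.pause(): setPaused(true) on the track
        }
    }

    public void dispose() {
        music.dispose();              // releases the underlying audio resources
    }
}
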
/external/e2fsprogs/resize/
resource_track.c
20 void init_resource_track(struct resource_track *track, const char *desc, in init_resource_track() argument
28 track->desc = desc; in init_resource_track()
29 track->brk_start = sbrk(0); in init_resource_track()
30 gettimeofday(&track->time_start, 0); in init_resource_track()
36 track->user_start = r.ru_utime; in init_resource_track()
37 track->system_start = r.ru_stime; in init_resource_track()
39 track->user_start.tv_sec = track->user_start.tv_usec = 0; in init_resource_track()
40 track->system_start.tv_sec = track->system_start.tv_usec = 0; in init_resource_track()
42 track->bytes_read = 0; in init_resource_track()
43 track->bytes_written = 0; in init_resource_track()
[all …]
/external/mp4parser/isoparser/src/main/java/com/googlecode/mp4parser/authoring/builder/
FragmentedMp4Builder.java
117 for (Track track : movie.getTracks()) { in createMoofMdat()
118 long[] intersects = intersectionFinder.sampleNumbers(track, movie); in createMoofMdat()
119 intersectionMap.put(track, intersects); in createMoofMdat()
131 for (Track track : sortedTracks) { in createMoofMdat()
132 … if (getAllowedHandlers().isEmpty() || getAllowedHandlers().contains(track.getHandler())) { in createMoofMdat()
133 long[] startSamples = intersectionMap.get(track); in createMoofMdat()
139 …g endSample = cycle + 1 < startSamples.length ? startSamples[cycle + 1] : track.getSamples().size(… in createMoofMdat()
143 boxes.add(createMoof(startSample, endSample, track, sequence)); in createMoofMdat()
144 boxes.add(createMdat(startSample, endSample, track, sequence++)); in createMoofMdat()
172 …protected Box createMdat(final long startSample, final long endSample, final Track track, final in…
[all …]
DefaultMp4Builder.java
88 for (Track track : movie.getTracks()) { in build()
90 List<ByteBuffer> samples = track.getSamples(); in build()
91 putSamples(track, samples); in build()
96 putSampleSizes(track, sizes); in build()
131 protected long[] putSampleSizes(Track track, long[] sizes) { in putSampleSizes() argument
132 return track2SampleSizes.put(track, sizes); in putSampleSizes()
135 protected List<ByteBuffer> putSamples(Track track, List<ByteBuffer> samples) { in putSamples() argument
136 return track2Sample.put(track, samples); in putSamples()
149 for (Track track : movie.getTracks()) { in createMovieBox()
150 …long tracksDuration = getDuration(track) * movieTimeScale / track.getTrackMetaData().getTimescale(… in createMovieBox()
[all …]
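
DefaultMp4Builder walks every Track of a Movie, collects the samples and sample sizes, and serializes the resulting box tree. A short sketch of driving it from application code, assuming the MovieCreator/IsoFile entry points of this isoparser vintage; file names are placeholders:

import com.coremedia.iso.IsoFile;
import com.googlecode.mp4parser.authoring.Movie;
import com.googlecode.mp4parser.authoring.builder.DefaultMp4Builder;
import com.googlecode.mp4parser.authoring.container.mp4.MovieCreator;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.nio.channels.FileChannel;

public class RemuxExample {
    public static void main(String[] args) throws Exception {
        // Parse an existing MP4 into a Movie (a list of Tracks).
        Movie movie = MovieCreator.build(new FileInputStream("in.mp4").getChannel());

        // Re-interleave the tracks into a new non-fragmented MP4.
        IsoFile out = new DefaultMp4Builder().build(movie);

        FileChannel fc = new FileOutputStream("out.mp4").getChannel();
        out.getBox(fc);   // write the complete box structure
        fc.close();
    }
}
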
SyncSampleIntersectFinderImpl.java
64 public long[] sampleNumbers(Track track, Movie movie) { in sampleNumbers() argument
65 final CacheTuple key = new CacheTuple(track, movie); in sampleNumbers()
71 if ("vide".equals(track.getHandler())) { in sampleNumbers()
72 if (track.getSyncSamples() != null && track.getSyncSamples().length > 0) { in sampleNumbers()
73 List<long[]> times = getSyncSamplesTimestamps(movie, track); in sampleNumbers()
74 …final long[] commonIndices = getCommonIndices(track.getSyncSamples(), getTimes(track, movie), trac… in sampleNumbers()
80 } else if ("soun".equals(track.getHandler())) { in sampleNumbers()
115 … AudioSampleEntry ase = (AudioSampleEntry) track.getSampleDescriptionBox().getSampleEntry(); in sampleNumbers()
116 TimeToSampleBox.Entry sttsEntry = track.getDecodingTimeEntries().get(0); in sampleNumbers()
137 long sc = track.getSamples().size(); in sampleNumbers()
[all …]
/external/webrtc/talk/app/webrtc/java/src/org/webrtc/
MediaStream.java
47 public boolean addTrack(AudioTrack track) { in addTrack() argument
48 if (nativeAddAudioTrack(nativeStream, track.nativeTrack)) { in addTrack()
49 audioTracks.add(track); in addTrack()
55 public boolean addTrack(VideoTrack track) { in addTrack() argument
56 if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) { in addTrack()
57 videoTracks.add(track); in addTrack()
66 public boolean addPreservedTrack(VideoTrack track) { in addPreservedTrack() argument
67 if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) { in addPreservedTrack()
68 preservedVideoTracks.add(track); in addPreservedTrack()
74 public boolean removeTrack(AudioTrack track) { in removeTrack() argument
[all …]
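
MediaStream.java forwards addTrack()/removeTrack() to the native stream and mirrors successful adds in a Java-side list. A hedged sketch of the surrounding usage with the PeerConnection Java API of this era; the track id and stream label are placeholders:

import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnectionFactory;

public class LocalStreamExample {
    public MediaStream createLocalStream(PeerConnectionFactory factory) {
        // Create an audio source/track pair and attach it to a local stream.
        AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
        AudioTrack audioTrack = factory.createAudioTrack("audio0", audioSource);   // placeholder id

        MediaStream stream = factory.createLocalMediaStream("stream0");            // placeholder label
        if (!stream.addTrack(audioTrack)) {   // the addTrack(AudioTrack) overload shown above
            throw new IllegalStateException("native AddTrack failed");
        }
        return stream;
    }
}
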
RtpSender.java
39 long track = nativeGetTrack(nativeRtpSender); in RtpSender() local
41 cachedTrack = (track == 0) ? null : new MediaStreamTrack(track); in RtpSender()
49 public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) { in setTrack() argument
51 (track == null) ? 0 : track.nativeTrack)) { in setTrack()
57 cachedTrack = track; in setTrack()
62 public MediaStreamTrack track() { in track() method in RtpSender
/external/esd/include/
audiofile.h
432 int afReadFrames (AFfilehandle, int track, void *buffer, int frameCount);
433 int afWriteFrames (AFfilehandle, int track, const void *buffer, int frameCount);
434 AFframecount afSeekFrame (AFfilehandle, int track, AFframecount frameoffset);
435 AFframecount afTellFrame (AFfilehandle, int track);
436 AFfileoffset afGetTrackBytes (AFfilehandle, int track);
437 float afGetFrameSize (AFfilehandle, int track, int expand3to4);
438 float afGetVirtualFrameSize (AFfilehandle, int track, int expand3to4);
442 void afInitAESChannelData (AFfilesetup, int track); /* obsolete */
443 void afInitAESChannelDataTo (AFfilesetup, int track, int willBeData);
444 int afGetAESChannelData (AFfilehandle, int track, unsigned char buf[24]);
[all …]
/external/webrtc/talk/app/webrtc/
mediastream.cc
56 bool MediaStream::AddTrack(AudioTrackInterface* track) { in AddTrack() argument
57 return AddTrack<AudioTrackVector, AudioTrackInterface>(&audio_tracks_, track); in AddTrack()
60 bool MediaStream::AddTrack(VideoTrackInterface* track) { in AddTrack() argument
61 return AddTrack<VideoTrackVector, VideoTrackInterface>(&video_tracks_, track); in AddTrack()
64 bool MediaStream::RemoveTrack(AudioTrackInterface* track) { in RemoveTrack() argument
65 return RemoveTrack<AudioTrackVector>(&audio_tracks_, track); in RemoveTrack()
68 bool MediaStream::RemoveTrack(VideoTrackInterface* track) { in RemoveTrack() argument
69 return RemoveTrack<VideoTrackVector>(&video_tracks_, track); in RemoveTrack()
89 bool MediaStream::AddTrack(TrackVector* tracks, Track* track) { in AddTrack() argument
90 typename TrackVector::iterator it = FindTrack(tracks, track->id()); in AddTrack()
[all …]
rtpsender.cc
62 AudioRtpSender::AudioRtpSender(AudioTrackInterface* track, in AudioRtpSender() argument
66 : id_(track->id()), in AudioRtpSender()
70 track_(track), in AudioRtpSender()
71 cached_track_enabled_(track->enabled()), in AudioRtpSender()
100 bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) { in SetTrack() argument
105 if (track && track->kind() != MediaStreamTrackInterface::kAudioKind) { in SetTrack()
106 LOG(LS_ERROR) << "SetTrack called on audio RtpSender with " << track->kind() in SetTrack()
110 AudioTrackInterface* audio_track = static_cast<AudioTrackInterface*>(track); in SetTrack()
202 VideoRtpSender::VideoRtpSender(VideoTrackInterface* track, in VideoRtpSender() argument
205 : id_(track->id()), in VideoRtpSender()
[all …]
rtpreceiver.cc
34 AudioRtpReceiver::AudioRtpReceiver(AudioTrackInterface* track, in AudioRtpReceiver() argument
37 : id_(track->id()), in AudioRtpReceiver()
38 track_(track), in AudioRtpReceiver()
41 cached_track_enabled_(track->enabled()) { in AudioRtpReceiver()
85 VideoRtpReceiver::VideoRtpReceiver(VideoTrackInterface* track, in VideoRtpReceiver() argument
88 : id_(track->id()), track_(track), ssrc_(ssrc), provider_(provider) { in VideoRtpReceiver()
mediastream.h
47 bool AddTrack(AudioTrackInterface* track) override;
48 bool AddTrack(VideoTrackInterface* track) override;
49 bool RemoveTrack(AudioTrackInterface* track) override;
50 bool RemoveTrack(VideoTrackInterface* track) override;
64 bool AddTrack(TrackVector* Tracks, Track* track);
66 bool RemoveTrack(TrackVector* Tracks, MediaStreamTrackInterface* track);
/external/mp4parser/isoparser/src/main/java/com/googlecode/mp4parser/authoring/
Movie.java
49 for (Track track : tracks) { in toString()
50 … s += "track_" + track.getTrackMetaData().getTrackId() + " (" + track.getHandler() + ") "; in toString()
59 for (Track track : tracks) { in getNextTrackId()
60 …nextTrackId = nextTrackId < track.getTrackMetaData().getTrackId() ? track.getTrackMetaData().getTr… in getNextTrackId()
67 for (Track track : tracks) {
68 if (track.getTrackMetaData().getTrackId() == trackId) {
69 return track;
78 for (Track track : this.getTracks()) {
79 timescale = gcd(track.getTrackMetaData().getTimescale(), timescale);
/external/mp4parser/isoparser/src/main/java/com/googlecode/mp4parser/authoring/adaptivestreaming/
FlatPackageWriterImpl.java
122 for (Track track : movieWithAdjustedTimescale.getTracks()) { in write()
123 String bitrate = Long.toString(manifestWriter.getBitrate(track)); in write()
124 long trackId = track.getTrackMetaData().getTrackId(); in write()
127 if (track.getMediaHeaderBox() instanceof SoundMediaHeaderBox) { in write()
130 } else if (track.getMediaHeaderBox() instanceof VideoMediaHeaderBox) { in write()
133 …System.err.println("Skipping Track with handler " + track.getHandler() + " and " + track.getMediaH… in write()
140 …long[] fragmentTimes = manifestWriter.calculateFragmentDurations(track, movieWithAdjustedTimescale… in write()
170 for (Track track : source.getTracks()) { in removeUnknownTracks()
171 if ("vide".equals(track.getHandler()) || "soun".equals(track.getHandler())) { in removeUnknownTracks()
172 nuMovie.addTrack(track); in removeUnknownTracks()
[all …]
FlatManifestWriterImpl.java
75 for (Track track : movie.getTracks()) { in getManifest()
76 if (track.getMediaHeaderBox() instanceof VideoMediaHeaderBox) { in getManifest()
77 …Durations = checkFragmentsAlign(videoFragmentsDurations, calculateFragmentDurations(track, movie)); in getManifest()
78 SampleDescriptionBox stsd = track.getSampleDescriptionBox(); in getManifest()
79 … videoQualities.add(getVideoQuality(track, (VisualSampleEntry) stsd.getSampleEntry())); in getManifest()
81 videoTimescale = track.getTrackMetaData().getTimescale(); in getManifest()
83 assert videoTimescale == track.getTrackMetaData().getTimescale(); in getManifest()
86 if (track.getMediaHeaderBox() instanceof SoundMediaHeaderBox) { in getManifest()
87 …Durations = checkFragmentsAlign(audioFragmentsDurations, calculateFragmentDurations(track, movie)); in getManifest()
88 SampleDescriptionBox stsd = track.getSampleDescriptionBox(); in getManifest()
[all …]
AbstractManifestWriter.java
42 public long[] calculateFragmentDurations(Track track, Movie movie) { in calculateFragmentDurations() argument
43 long[] startSamples = intersectionFinder.sampleNumbers(track, movie); in calculateFragmentDurations()
48 for (TimeToSampleBox.Entry entry : track.getDecodingTimeEntries()) { in calculateFragmentDurations()
65 public long getBitrate(Track track) { in getBitrate() argument
67 for (ByteBuffer sample : track.getSamples()) { in getBitrate()
71 … bitrate /= ((double) getDuration(track)) / track.getTrackMetaData().getTimescale(); // per second in getBitrate()
75 protected static long getDuration(Track track) { in getDuration() argument
77 for (TimeToSampleBox.Entry entry : track.getDecodingTimeEntries()) { in getDuration()
/external/e2fsprogs/e2fsck/
scantest.c
52 void init_resource_track(struct resource_track *track) in init_resource_track() argument
56 track->brk_start = sbrk(0); in init_resource_track()
57 gettimeofday(&track->time_start, 0); in init_resource_track()
59 track->user_start = r.ru_utime; in init_resource_track()
60 track->system_start = r.ru_stime; in init_resource_track()
70 static void print_resource_track(struct resource_track *track) in print_resource_track() argument
79 (int) (((char *) sbrk(0)) - ((char *) track->brk_start)), in print_resource_track()
80 timeval_subtract(&time_end, &track->time_start), in print_resource_track()
81 timeval_subtract(&r.ru_utime, &track->user_start), in print_resource_track()
82 timeval_subtract(&r.ru_stime, &track->system_start)); in print_resource_track()
/external/libgdx/backends/gdx-backend-android/src/com/badlogic/gdx/backends/android/
AndroidAudioDevice.java
30 private final AudioTrack track; field in AndroidAudioDevice
45 track = new AudioTrack(AudioManager.STREAM_MUSIC, samplingRate, isMono ? AudioFormat.CHANNEL_OUT_M… in AndroidAudioDevice()
47 track.play(); in AndroidAudioDevice()
53 track.stop(); in dispose()
54 track.release(); in dispose()
64 int writtenSamples = track.write(samples, offset, numSamples); in writeSamples()
66 writtenSamples += track.write(samples, offset + writtenSamples, numSamples - writtenSamples); in writeSamples()
82 int writtenSamples = track.write(buffer, 0, numSamples); in writeSamples()
84 writtenSamples += track.write(buffer, writtenSamples, numSamples - writtenSamples); in writeSamples()
94 track.setStereoVolume(volume, volume); in setVolume()
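
AndroidAudioDevice wraps android.media.AudioTrack in streaming mode and loops on write() until every sample has been consumed, since a single write() may accept fewer samples than requested. The same pattern outside libGDX, as a minimal sketch; the sample rate and tone are arbitrary:

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public class ToneStreamer {
    public void playTone() {
        int sampleRate = 44100;
        int minBuf = AudioTrack.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

        AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                minBuf, AudioTrack.MODE_STREAM);
        track.play();

        // One second of a 440 Hz sine wave as 16-bit PCM.
        short[] samples = new short[sampleRate];
        for (int i = 0; i < samples.length; i++) {
            samples[i] = (short) (Math.sin(2 * Math.PI * 440 * i / sampleRate) * Short.MAX_VALUE);
        }

        // write() may consume fewer samples than requested, so loop exactly as the backend does.
        int written = 0;
        while (written < samples.length) {
            written += track.write(samples, written, samples.length - written);
        }

        track.stop();
        track.release();
    }
}
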
/external/llvm/include/llvm/IR/
MetadataTracking.h
44 static bool track(Metadata *&MD) { in track() function
45 return track(&MD, *MD, static_cast<Metadata *>(nullptr)); in track()
53 static bool track(void *Ref, Metadata &MD, Metadata &Owner) { in track() function
54 return track(Ref, MD, &Owner); in track()
62 static bool track(void *Ref, Metadata &MD, MetadataAsValue &Owner) { in track() function
63 return track(Ref, MD, &Owner); in track()
94 static bool track(void *Ref, Metadata &MD, OwnerTy Owner);
/external/webrtc/talk/app/webrtc/objc/
RTCMediaStream.mm
65 - (BOOL)addAudioTrack:(RTCAudioTrack*)track {
66 if (self.mediaStream->AddTrack(track.audioTrack)) {
67 [_audioTracks addObject:track];
73 - (BOOL)addVideoTrack:(RTCVideoTrack*)track {
74 if (self.mediaStream->AddTrack(track.nativeVideoTrack)) {
75 [_videoTracks addObject:track];
81 - (BOOL)removeAudioTrack:(RTCAudioTrack*)track {
82 NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:track];
85 if (index != NSNotFound && self.mediaStream->RemoveTrack(track.audioTrack)) {
92 - (BOOL)removeVideoTrack:(RTCVideoTrack*)track {
[all …]
/external/jmonkeyengine/engine/src/blender/com/jme3/scene/plugins/blender/animations/
CalculationBone.java
18 private BoneTrack track; field in CalculationBone
53 public CalculationBone(Bone bone, BoneTrack track) { in CalculationBone() argument
55 this.track = track; in CalculationBone()
56 this.translations = track.getTranslations(); in CalculationBone()
57 this.rotations = track.getRotations(); in CalculationBone()
58 this.scales = track.getScales(); in CalculationBone()
114 if(track != null) { in applyCalculatedTracks()
115 track.setKeyframes(track.getTimes(), translations, rotations, scales); in applyCalculatedTracks()
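
CalculationBone stores a BoneTrack and later rewrites its keyframes through setKeyframes(). A hedged sketch of building such a track directly with jME3's animation classes; the bone index, times, and transforms are made up:

import com.jme3.animation.BoneTrack;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;

public class BoneTrackExample {
    public BoneTrack buildTrack() {
        int targetBoneIndex = 0;                       // hypothetical bone index
        float[] times = {0f, 1f};                      // keyframe timestamps in seconds
        Vector3f[] translations = {new Vector3f(0, 0, 0), new Vector3f(0, 1, 0)};
        Quaternion[] rotations = {new Quaternion(), new Quaternion()};   // identity rotations

        // Constructor without scales; setKeyframes() (as used above) can replace the data later.
        return new BoneTrack(targetBoneIndex, times, translations, rotations);
    }
}
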
/external/mp4parser/isoparser/src/main/java/com/googlecode/mp4parser/authoring/tracks/
AppendTrack.java
44 for (Track track : tracks) { in AppendTrack()
47 stsd = track.getSampleDescriptionBox(); in AppendTrack()
51 track.getSampleDescriptionBox().getBox(Channels.newChannel(curBaos)); in AppendTrack()
57 SampleDescriptionBox curStsd = track.getSampleDescriptionBox(); in AppendTrack()
67 …w IOException("Cannot append " + track + " to " + tracks[0] + " since their Sample Description Box… in AppendTrack()
238 for (Track track : tracks) { in getSamples()
239 lists.addAll(track.getSamples()); in getSamples()
252 for (Track track : tracks) { in getDecodingTimeEntries()
253 lists.add(TimeToSampleBox.blowupTimeToSamples(track.getDecodingTimeEntries())); in getDecodingTimeEntries()
277 for (Track track : tracks) { in getCompositionTimeEntries()
[all …]
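
AppendTrack refuses to concatenate tracks whose Sample Description Boxes differ, then exposes the merged sample and timing lists as one Track. A hedged sketch of joining the first track of two parsed movies, feeding the result into the builder flow sketched earlier; the input movies are assumed to come from MovieCreator.build(...):

import com.googlecode.mp4parser.authoring.Movie;
import com.googlecode.mp4parser.authoring.Track;
import com.googlecode.mp4parser.authoring.tracks.AppendTrack;

import java.io.IOException;

public class ConcatExample {
    public Movie concatFirstTracks(Movie movieA, Movie movieB) throws IOException {
        // Throws if the two tracks' Sample Description Boxes are incompatible.
        Track joined = new AppendTrack(movieA.getTracks().get(0),
                                       movieB.getTracks().get(0));

        Movie result = new Movie();
        result.addTrack(joined);    // hand this Movie to DefaultMp4Builder as shown earlier
        return result;
    }
}
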
